Compare commits

...

11 Commits

Author SHA1 Message Date
b8106354b0 Fix --git-since not detecting first file change in filtered directories
The VFS only sets up file watches via read() and read_dir(), not
metadata(). When git filtering caused snapshot_from_vfs to return
early for $path directories, read_dir was never called, so no file
watch was established. This meant file modifications never generated
VFS events and were silently ignored until the server was restarted.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 18:04:27 +01:00
0dc37ac848 Fix --git-since live sync not detecting changes and creating duplicates
Two issues prevented --git-since from working correctly during live sync:

1. Server: File changes weren't detected because git-filtered project nodes
   had empty relevant_paths, so the change processor couldn't map VFS events
   back to tree instances. Fixed by registering $path directories and the
   project folder in relevant_paths even when filtered.

2. Plugin: When a previously-filtered file was first acknowledged, it appeared
   as an ADD patch. The plugin created a new instance instead of adopting the
   existing one in Studio, causing duplicates. Fixed by checking for untracked
   children with matching Name+ClassName before calling Instance.new.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 16:19:01 +01:00
891b74b135 Merge branch 'git-track' into master 2026-02-13 14:11:17 +01:00
ari
18fdbce8b0 name-prop (#1)
Reviewed-on: #1
Co-authored-by: ari <git@astrid.email>
Co-committed-by: ari <git@astrid.email>
2026-02-13 13:09:30 +00:00
Ivan Matthew
a2adf2b517 Improves sourcemap path handling with pathdiff (#1217) 2026-02-12 19:17:28 -08:00
Micah
4deda0e155 Use msgpack for API (#1176) 2026-02-12 18:37:24 -08:00
ari
4df2d3c5f8 Add actor, bindables and remotes to json_model_classes (#1199) 2026-02-12 17:34:32 -08:00
boatbomber
4965165ad5 Add option to forget prior info for place in reminder notif (#1215) 2026-01-23 21:15:34 +00:00
boatbomber
68eab3479a Fix notification unmount thread cancel bug (#1211) 2026-01-19 16:35:19 -08:00
Ivan Matthew
2a1102fc55 Implement VFS Path normalization for improved cross-platform tree synchronization (#1201) 2026-01-19 15:04:59 -08:00
Ken Loeffler
02b41133f8 Use post for ref patch and serialize (#1192) 2026-01-19 22:44:42 +00:00
80 changed files with 1176 additions and 308 deletions

3
.gitignore vendored
View File

@@ -23,3 +23,6 @@
# Macos file system junk # Macos file system junk
._* ._*
.DS_STORE .DS_STORE
# JetBrains IDEs
/.idea/

3
.gitmodules vendored
View File

@@ -16,3 +16,6 @@
[submodule "plugin/Packages/Highlighter"] [submodule "plugin/Packages/Highlighter"]
path = plugin/Packages/Highlighter path = plugin/Packages/Highlighter
url = https://github.com/boatbomber/highlighter.git url = https://github.com/boatbomber/highlighter.git
[submodule "plugin/Packages/msgpack-luau"]
path = plugin/Packages/msgpack-luau
url = https://github.com/cipharius/msgpack-luau/

View File

@@ -30,9 +30,26 @@ Making a new release? Simply add the new header with the version and date undern
--> -->
## Unreleased ## Unreleased
* Fixed a bug caused by having reference properties (such as `ObjectValue.Value`) that point to an Instance not included in syncback. ([#1179])
* `inf` and `nan` values in properties are now synced ([#1176])
* Fixed a bug caused by having reference properties (such as `ObjectValue.Value`) that point to an Instance not included in syncback. ([#1179])
* Implemented support for the "name" property in meta/model JSON files. ([#1187])
* Fixed instance replacement fallback failing when too many instances needed to be replaced. ([#1192])
* Added actors and bindable/remote event/function variants to be synced back as JSON files. ([#1199])
* Fixed a bug where MacOS paths weren't being handled correctly. ([#1201])
* Fixed a bug where the notification timeout thread would fail to cancel on unmount ([#1211])
* Added a "Forget" option to the sync reminder notification to avoid being reminded for that place in the future ([#1215])
* Improves relative path calculation for sourcemap generation to avoid issues with Windows UNC paths. ([#1217])
[#1176]: https://github.com/rojo-rbx/rojo/pull/1176
[#1179]: https://github.com/rojo-rbx/rojo/pull/1179 [#1179]: https://github.com/rojo-rbx/rojo/pull/1179
[#1187]: https://github.com/rojo-rbx/rojo/pull/1187
[#1192]: https://github.com/rojo-rbx/rojo/pull/1192
[#1199]: https://github.com/rojo-rbx/rojo/pull/1199
[#1201]: https://github.com/rojo-rbx/rojo/pull/1201
[#1211]: https://github.com/rojo-rbx/rojo/pull/1211
[#1215]: https://github.com/rojo-rbx/rojo/pull/1215
[#1217]: https://github.com/rojo-rbx/rojo/pull/1217
## [7.7.0-rc.1] (November 27th, 2025) ## [7.7.0-rc.1] (November 27th, 2025)

View File

@@ -14,6 +14,7 @@ Code contributions are welcome for features and bugs that have been reported in
You'll want these tools to work on Rojo: You'll want these tools to work on Rojo:
* Latest stable Rust compiler * Latest stable Rust compiler
* Rustfmt and Clippy are used for code formatting and linting.
* Latest stable [Rojo](https://github.com/rojo-rbx/rojo) * Latest stable [Rojo](https://github.com/rojo-rbx/rojo)
* [Rokit](https://github.com/rojo-rbx/rokit) * [Rokit](https://github.com/rojo-rbx/rokit)
* [Luau Language Server](https://github.com/JohnnyMorganz/luau-lsp) (Only needed if working on the Studio plugin.) * [Luau Language Server](https://github.com/JohnnyMorganz/luau-lsp) (Only needed if working on the Studio plugin.)

20
Cargo.lock generated
View File

@@ -1319,6 +1319,7 @@ dependencies = [
"fs-err", "fs-err",
"notify", "notify",
"serde", "serde",
"tempfile",
] ]
[[package]] [[package]]
@@ -1519,6 +1520,12 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pathdiff"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]] [[package]]
name = "percent-encoding" name = "percent-encoding"
version = "2.3.2" version = "2.3.2"
@@ -2067,6 +2074,7 @@ dependencies = [
"num_cpus", "num_cpus",
"opener", "opener",
"paste", "paste",
"pathdiff",
"pretty_assertions", "pretty_assertions",
"profiling", "profiling",
"rayon", "rayon",
@@ -2077,10 +2085,12 @@ dependencies = [
"rbx_xml", "rbx_xml",
"reqwest", "reqwest",
"ritz", "ritz",
"rmp-serde",
"roblox_install", "roblox_install",
"rojo-insta-ext", "rojo-insta-ext",
"semver", "semver",
"serde", "serde",
"serde_bytes",
"serde_json", "serde_json",
"serde_yaml", "serde_yaml",
"strum", "strum",
@@ -2221,6 +2231,16 @@ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]]
name = "serde_bytes"
version = "0.11.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8"
dependencies = [
"serde",
"serde_core",
]
[[package]] [[package]]
name = "serde_cbor" name = "serde_cbor"
version = "0.11.2" version = "0.11.2"

View File

@@ -100,10 +100,13 @@ clap = { version = "3.2.25", features = ["derive"] }
profiling = "1.0.15" profiling = "1.0.15"
yaml-rust2 = "0.10.3" yaml-rust2 = "0.10.3"
data-encoding = "2.8.0" data-encoding = "2.8.0"
pathdiff = "0.2.3"
blake3 = "1.5.0" blake3 = "1.5.0"
float-cmp = "0.9.0" float-cmp = "0.9.0"
indexmap = { version = "2.10.0", features = ["serde"] } indexmap = { version = "2.10.0", features = ["serde"] }
rmp-serde = "1.3.0"
serde_bytes = "0.11.19"
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winreg = "0.10.1" winreg = "0.10.1"
@@ -122,7 +125,7 @@ semver = "1.0.22"
rojo-insta-ext = { path = "crates/rojo-insta-ext" } rojo-insta-ext = { path = "crates/rojo-insta-ext" }
criterion = "0.3.6" criterion = "0.3.6"
insta = { version = "1.36.1", features = ["redactions", "yaml"] } insta = { version = "1.36.1", features = ["redactions", "yaml", "json"] }
paste = "1.0.14" paste = "1.0.14"
pretty_assertions = "1.4.0" pretty_assertions = "1.4.0"
serde_yaml = "0.8.26" serde_yaml = "0.8.26"

View File

@@ -30,6 +30,11 @@ fn snapshot_from_fs_path(path: &Path) -> io::Result<VfsSnapshot> {
continue; continue;
} }
// Ignore images in msgpack-luau because they aren't UTF-8 encoded.
if file_name.ends_with(".png") {
continue;
}
let child_snapshot = snapshot_from_fs_path(&entry.path())?; let child_snapshot = snapshot_from_fs_path(&entry.path())?;
children.push((file_name, child_snapshot)); children.push((file_name, child_snapshot));
} }

View File

@@ -1,6 +1,7 @@
# memofs Changelog # memofs Changelog
## Unreleased Changes ## Unreleased Changes
* Added `Vfs::canonicalize`. [#1201]
## 0.3.1 (2025-11-27) ## 0.3.1 (2025-11-27)
* Added `Vfs::exists`. [#1169] * Added `Vfs::exists`. [#1169]

View File

@@ -19,3 +19,6 @@ crossbeam-channel = "0.5.12"
fs-err = "2.11.0" fs-err = "2.11.0"
notify = "4.0.17" notify = "4.0.17"
serde = { version = "1.0.197", features = ["derive"] } serde = { version = "1.0.197", features = ["derive"] }
[dev-dependencies]
tempfile = "3.10.1"

View File

@@ -232,6 +232,33 @@ impl VfsBackend for InMemoryFs {
} }
} }
// TODO: We rely on Rojo to prepend cwd to any relative path before storing paths
// in MemoFS. The current implementation will error if no prepended absolute path
// is found. It really only normalizes paths within the provided path's context.
// Example: "/Users/username/project/../other/file.txt" ->
// "/Users/username/other/file.txt"
// Erroneous example: "/Users/../../other/file.txt" -> "/other/file.txt"
// This is not very robust. We should implement proper path normalization here or otherwise
// warn if we are missing context and can not fully canonicalize the path correctly.
fn canonicalize(&mut self, path: &Path) -> io::Result<PathBuf> {
let mut normalized = PathBuf::new();
for component in path.components() {
match component {
std::path::Component::ParentDir => {
normalized.pop();
}
std::path::Component::CurDir => {}
_ => normalized.push(component),
}
}
let inner = self.inner.lock().unwrap();
match inner.entries.get(&normalized) {
Some(_) => Ok(normalized),
None => not_found(&normalized),
}
}
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
let inner = self.inner.lock().unwrap(); let inner = self.inner.lock().unwrap();

View File

@@ -77,6 +77,7 @@ pub trait VfsBackend: sealed::Sealed + Send + 'static {
fn metadata(&mut self, path: &Path) -> io::Result<Metadata>; fn metadata(&mut self, path: &Path) -> io::Result<Metadata>;
fn remove_file(&mut self, path: &Path) -> io::Result<()>; fn remove_file(&mut self, path: &Path) -> io::Result<()>;
fn remove_dir_all(&mut self, path: &Path) -> io::Result<()>; fn remove_dir_all(&mut self, path: &Path) -> io::Result<()>;
fn canonicalize(&mut self, path: &Path) -> io::Result<PathBuf>;
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent>; fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent>;
fn watch(&mut self, path: &Path) -> io::Result<()>; fn watch(&mut self, path: &Path) -> io::Result<()>;
@@ -225,6 +226,11 @@ impl VfsInner {
self.backend.metadata(path) self.backend.metadata(path)
} }
fn canonicalize<P: AsRef<Path>>(&mut self, path: P) -> io::Result<PathBuf> {
let path = path.as_ref();
self.backend.canonicalize(path)
}
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
self.backend.event_receiver() self.backend.event_receiver()
} }
@@ -413,6 +419,19 @@ impl Vfs {
self.inner.lock().unwrap().metadata(path) self.inner.lock().unwrap().metadata(path)
} }
/// Normalize a path via the underlying backend.
///
/// Roughly equivalent to [`std::fs::canonicalize`][std::fs::canonicalize]. Relative paths are
/// resolved against the backend's current working directory (if applicable) and errors are
/// surfaced directly from the backend.
///
/// [std::fs::canonicalize]: https://doc.rust-lang.org/stable/std/fs/fn.canonicalize.html
#[inline]
pub fn canonicalize<P: AsRef<Path>>(&self, path: P) -> io::Result<PathBuf> {
let path = path.as_ref();
self.inner.lock().unwrap().canonicalize(path)
}
/// Retrieve a handle to the event receiver for this `Vfs`. /// Retrieve a handle to the event receiver for this `Vfs`.
#[inline] #[inline]
pub fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { pub fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
@@ -540,6 +559,13 @@ impl VfsLock<'_> {
self.inner.metadata(path) self.inner.metadata(path)
} }
/// Normalize a path via the underlying backend.
#[inline]
pub fn normalize<P: AsRef<Path>>(&mut self, path: P) -> io::Result<PathBuf> {
let path = path.as_ref();
self.inner.canonicalize(path)
}
/// Retrieve a handle to the event receiver for this `Vfs`. /// Retrieve a handle to the event receiver for this `Vfs`.
#[inline] #[inline]
pub fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { pub fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
@@ -555,7 +581,9 @@ impl VfsLock<'_> {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use crate::{InMemoryFs, Vfs, VfsSnapshot}; use crate::{InMemoryFs, StdBackend, Vfs, VfsSnapshot};
use std::io;
use std::path::PathBuf;
/// https://github.com/rojo-rbx/rojo/issues/899 /// https://github.com/rojo-rbx/rojo/issues/899
#[test] #[test]
@@ -571,4 +599,62 @@ mod test {
"bar\nfoo\n\n" "bar\nfoo\n\n"
); );
} }
/// https://github.com/rojo-rbx/rojo/issues/1200
#[test]
fn canonicalize_in_memory_success() {
let mut imfs = InMemoryFs::new();
let contents = "Lorem ipsum dolor sit amet.".to_string();
imfs.load_snapshot("/test/file.txt", VfsSnapshot::file(contents.to_string()))
.unwrap();
let vfs = Vfs::new(imfs);
assert_eq!(
vfs.canonicalize("/test/nested/../file.txt").unwrap(),
PathBuf::from("/test/file.txt")
);
assert_eq!(
vfs.read_to_string(vfs.canonicalize("/test/nested/../file.txt").unwrap())
.unwrap()
.to_string(),
contents.to_string()
);
}
#[test]
fn canonicalize_in_memory_missing_errors() {
let imfs = InMemoryFs::new();
let vfs = Vfs::new(imfs);
let err = vfs.canonicalize("test").unwrap_err();
assert_eq!(err.kind(), io::ErrorKind::NotFound);
}
#[test]
fn canonicalize_std_backend_success() {
let contents = "Lorem ipsum dolor sit amet.".to_string();
let dir = tempfile::tempdir().unwrap();
let file_path = dir.path().join("file.txt");
fs_err::write(&file_path, contents.to_string()).unwrap();
let vfs = Vfs::new(StdBackend::new());
let canonicalized = vfs.canonicalize(&file_path).unwrap();
assert_eq!(canonicalized, file_path.canonicalize().unwrap());
assert_eq!(
vfs.read_to_string(&canonicalized).unwrap().to_string(),
contents.to_string()
);
}
#[test]
fn canonicalize_std_backend_missing_errors() {
let dir = tempfile::tempdir().unwrap();
let file_path = dir.path().join("test");
let vfs = Vfs::new(StdBackend::new());
let err = vfs.canonicalize(&file_path).unwrap_err();
assert_eq!(err.kind(), io::ErrorKind::NotFound);
}
} }

View File

@@ -1,5 +1,5 @@
use std::io; use std::io;
use std::path::Path; use std::path::{Path, PathBuf};
use crate::{Metadata, ReadDir, VfsBackend, VfsEvent}; use crate::{Metadata, ReadDir, VfsBackend, VfsEvent};
@@ -50,6 +50,10 @@ impl VfsBackend for NoopBackend {
Err(io::Error::other("NoopBackend doesn't do anything")) Err(io::Error::other("NoopBackend doesn't do anything"))
} }
fn canonicalize(&mut self, _path: &Path) -> io::Result<PathBuf> {
Err(io::Error::other("NoopBackend doesn't do anything"))
}
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
crossbeam_channel::never() crossbeam_channel::never()
} }

View File

@@ -106,6 +106,10 @@ impl VfsBackend for StdBackend {
}) })
} }
fn canonicalize(&mut self, path: &Path) -> io::Result<PathBuf> {
fs_err::canonicalize(path)
}
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
self.watcher_receiver.clone() self.watcher_receiver.clone()
} }

View File

@@ -1,5 +1,7 @@
local HttpService = game:GetService("HttpService") local HttpService = game:GetService("HttpService")
local msgpack = require(script.Parent.Parent.msgpack)
local stringTemplate = [[ local stringTemplate = [[
Http.Response { Http.Response {
code: %d code: %d
@@ -31,4 +33,8 @@ function Response:json()
return HttpService:JSONDecode(self.body) return HttpService:JSONDecode(self.body)
end end
function Response:msgpack()
return msgpack.decode(self.body)
end
return Response return Response

View File

@@ -1,7 +1,8 @@
local HttpService = game:GetService("HttpService") local HttpService = game:GetService("HttpService")
local Promise = require(script.Parent.Promise)
local Log = require(script.Parent.Log) local Log = require(script.Parent.Log)
local msgpack = require(script.Parent.msgpack)
local Promise = require(script.Parent.Promise)
local HttpError = require(script.Error) local HttpError = require(script.Error)
local HttpResponse = require(script.Response) local HttpResponse = require(script.Response)
@@ -68,4 +69,12 @@ function Http.jsonDecode(source)
return HttpService:JSONDecode(source) return HttpService:JSONDecode(source)
end end
function Http.msgpackEncode(object)
return msgpack.encode(object)
end
function Http.msgpackDecode(source)
return msgpack.decode(source)
end
return Http return Http

View File

@@ -145,7 +145,7 @@ function ApiContext:connect()
return Http.get(url) return Http.get(url)
:andThen(rejectFailedRequests) :andThen(rejectFailedRequests)
:andThen(Http.Response.json) :andThen(Http.Response.msgpack)
:andThen(rejectWrongProtocolVersion) :andThen(rejectWrongProtocolVersion)
:andThen(function(body) :andThen(function(body)
assert(validateApiInfo(body)) assert(validateApiInfo(body))
@@ -163,7 +163,7 @@ end
function ApiContext:read(ids) function ApiContext:read(ids)
local url = ("%s/api/read/%s"):format(self.__baseUrl, table.concat(ids, ",")) local url = ("%s/api/read/%s"):format(self.__baseUrl, table.concat(ids, ","))
return Http.get(url):andThen(rejectFailedRequests):andThen(Http.Response.json):andThen(function(body) return Http.get(url):andThen(rejectFailedRequests):andThen(Http.Response.msgpack):andThen(function(body)
if body.sessionId ~= self.__sessionId then if body.sessionId ~= self.__sessionId then
return Promise.reject("Server changed ID") return Promise.reject("Server changed ID")
end end
@@ -191,9 +191,9 @@ function ApiContext:write(patch)
table.insert(updated, fixedUpdate) table.insert(updated, fixedUpdate)
end end
-- Only add the 'added' field if the table is non-empty, or else Roblox's -- Only add the 'added' field if the table is non-empty, or else the msgpack
-- JSON implementation will turn the table into an array instead of an -- encode implementation will turn the table into an array instead of a map,
-- object, causing API validation to fail. -- causing API validation to fail.
local added local added
if next(patch.added) ~= nil then if next(patch.added) ~= nil then
added = patch.added added = patch.added
@@ -206,13 +206,16 @@ function ApiContext:write(patch)
added = added, added = added,
} }
body = Http.jsonEncode(body) body = Http.msgpackEncode(body)
return Http.post(url, body):andThen(rejectFailedRequests):andThen(Http.Response.json):andThen(function(responseBody) return Http.post(url, body)
Log.info("Write response: {:?}", responseBody) :andThen(rejectFailedRequests)
:andThen(Http.Response.msgpack)
:andThen(function(responseBody)
Log.info("Write response: {:?}", responseBody)
return responseBody return responseBody
end) end)
end end
function ApiContext:connectWebSocket(packetHandlers) function ApiContext:connectWebSocket(packetHandlers)
@@ -234,7 +237,7 @@ function ApiContext:connectWebSocket(packetHandlers)
local closed, errored, received local closed, errored, received
received = self.__wsClient.MessageReceived:Connect(function(msg) received = self.__wsClient.MessageReceived:Connect(function(msg)
local data = Http.jsonDecode(msg) local data = Http.msgpackDecode(msg)
if data.sessionId ~= self.__sessionId then if data.sessionId ~= self.__sessionId then
Log.warn("Received message with wrong session ID; ignoring") Log.warn("Received message with wrong session ID; ignoring")
return return
@@ -280,7 +283,7 @@ end
function ApiContext:open(id) function ApiContext:open(id)
local url = ("%s/api/open/%s"):format(self.__baseUrl, id) local url = ("%s/api/open/%s"):format(self.__baseUrl, id)
return Http.post(url, ""):andThen(rejectFailedRequests):andThen(Http.Response.json):andThen(function(body) return Http.post(url, ""):andThen(rejectFailedRequests):andThen(Http.Response.msgpack):andThen(function(body)
if body.sessionId ~= self.__sessionId then if body.sessionId ~= self.__sessionId then
return Promise.reject("Server changed ID") return Promise.reject("Server changed ID")
end end
@@ -290,31 +293,39 @@ function ApiContext:open(id)
end end
function ApiContext:serialize(ids: { string }) function ApiContext:serialize(ids: { string })
local url = ("%s/api/serialize/%s"):format(self.__baseUrl, table.concat(ids, ",")) local url = ("%s/api/serialize"):format(self.__baseUrl)
local request_body = Http.msgpackEncode({ sessionId = self.__sessionId, ids = ids })
return Http.get(url):andThen(rejectFailedRequests):andThen(Http.Response.json):andThen(function(body) return Http.post(url, request_body)
if body.sessionId ~= self.__sessionId then :andThen(rejectFailedRequests)
return Promise.reject("Server changed ID") :andThen(Http.Response.msgpack)
end :andThen(function(response_body)
if response_body.sessionId ~= self.__sessionId then
return Promise.reject("Server changed ID")
end
assert(validateApiSerialize(body)) assert(validateApiSerialize(response_body))
return body return response_body
end) end)
end end
function ApiContext:refPatch(ids: { string }) function ApiContext:refPatch(ids: { string })
local url = ("%s/api/ref-patch/%s"):format(self.__baseUrl, table.concat(ids, ",")) local url = ("%s/api/ref-patch"):format(self.__baseUrl)
local request_body = Http.msgpackEncode({ sessionId = self.__sessionId, ids = ids })
return Http.get(url):andThen(rejectFailedRequests):andThen(Http.Response.json):andThen(function(body) return Http.post(url, request_body)
if body.sessionId ~= self.__sessionId then :andThen(rejectFailedRequests)
return Promise.reject("Server changed ID") :andThen(Http.Response.msgpack)
end :andThen(function(response_body)
if response_body.sessionId ~= self.__sessionId then
return Promise.reject("Server changed ID")
end
assert(validateApiRefPatch(body)) assert(validateApiRefPatch(response_body))
return body return response_body
end) end)
end end
return ApiContext return ApiContext

View File

@@ -19,9 +19,15 @@ local FullscreenNotification = Roact.Component:extend("FullscreenNotification") local FullscreenNotification = Roact.Component:extend("FullscreenNotification")
function FullscreenNotification:init() function FullscreenNotification:init()
self.transparency, self.setTransparency = Roact.createBinding(0) self.transparency, self.setTransparency = Roact.createBinding(0)
self.lifetime = self.props.timeout self.lifetime = self.props.timeout
self.dismissed = false
end end
function FullscreenNotification:dismiss() function FullscreenNotification:dismiss()
if self.dismissed then
return
end
self.dismissed = true
if self.props.onClose then if self.props.onClose then
self.props.onClose() self.props.onClose()
end end
@@ -59,7 +65,7 @@ function FullscreenNotification:didMount()
end end
function FullscreenNotification:willUnmount() function FullscreenNotification:willUnmount()
if self.timeout and coroutine.status(self.timeout) ~= "dead" then if self.timeout and coroutine.status(self.timeout) == "suspended" then
task.cancel(self.timeout) task.cancel(self.timeout)
end end
end end

View File

@@ -25,6 +25,7 @@ function Notification:init()
self.binding = bindingUtil.fromMotor(self.motor) self.binding = bindingUtil.fromMotor(self.motor)
self.lifetime = self.props.timeout self.lifetime = self.props.timeout
self.dismissed = false
self.motor:onStep(function(value) self.motor:onStep(function(value)
if value <= 0 and self.props.onClose then if value <= 0 and self.props.onClose then
@@ -34,6 +35,11 @@ function Notification:init()
end end
function Notification:dismiss() function Notification:dismiss()
if self.dismissed then
return
end
self.dismissed = true
self.motor:setGoal(Flipper.Spring.new(0, { self.motor:setGoal(Flipper.Spring.new(0, {
frequency = 5, frequency = 5,
dampingRatio = 1, dampingRatio = 1,
@@ -75,7 +81,7 @@ function Notification:didMount()
end end
function Notification:willUnmount() function Notification:willUnmount()
if self.timeout and coroutine.status(self.timeout) ~= "dead" then if self.timeout and coroutine.status(self.timeout) == "suspended" then
task.cancel(self.timeout) task.cancel(self.timeout)
end end
end end

View File

@@ -301,6 +301,19 @@ function App:setPriorSyncInfo(host: string, port: string, projectName: string)
Settings:set("priorEndpoints", priorSyncInfos) Settings:set("priorEndpoints", priorSyncInfos)
end end
function App:forgetPriorSyncInfo()
local priorSyncInfos = Settings:get("priorEndpoints")
if not priorSyncInfos then
priorSyncInfos = {}
end
local id = tostring(game.PlaceId)
priorSyncInfos[id] = nil
Log.trace("Erased last used endpoint for {}", game.PlaceId)
Settings:set("priorEndpoints", priorSyncInfos)
end
function App:getHostAndPort() function App:getHostAndPort()
local host = self.host:getValue() local host = self.host:getValue()
local port = self.port:getValue() local port = self.port:getValue()
@@ -435,7 +448,8 @@ function App:checkSyncReminder()
self:findActiveServer() self:findActiveServer()
:andThen(function(serverInfo, host, port) :andThen(function(serverInfo, host, port)
self:sendSyncReminder( self:sendSyncReminder(
`Project '{serverInfo.projectName}' is serving at {host}:{port}.\nWould you like to connect?` `Project '{serverInfo.projectName}' is serving at {host}:{port}.\nWould you like to connect?`,
{ "Connect", "Dismiss" }
) )
end) end)
:catch(function() :catch(function()
@@ -446,7 +460,8 @@ function App:checkSyncReminder()
local timeSinceSync = timeUtil.elapsedToText(os.time() - priorSyncInfo.timestamp) local timeSinceSync = timeUtil.elapsedToText(os.time() - priorSyncInfo.timestamp)
self:sendSyncReminder( self:sendSyncReminder(
`You synced project '{priorSyncInfo.projectName}' to this place {timeSinceSync}.\nDid you mean to run 'rojo serve' and then connect?` `You synced project '{priorSyncInfo.projectName}' to this place {timeSinceSync}.\nDid you mean to run 'rojo serve' and then connect?`,
{ "Connect", "Forget", "Dismiss" }
) )
end end
end) end)
@@ -486,12 +501,16 @@ function App:stopSyncReminderPolling()
end end
end end
function App:sendSyncReminder(message: string) function App:sendSyncReminder(message: string, shownActions: { string })
local syncReminderMode = Settings:get("syncReminderMode") local syncReminderMode = Settings:get("syncReminderMode")
if syncReminderMode == "None" then if syncReminderMode == "None" then
return return
end end
local connectIndex = table.find(shownActions, "Connect")
local forgetIndex = table.find(shownActions, "Forget")
local dismissIndex = table.find(shownActions, "Dismiss")
self.dismissSyncReminder = self:addNotification({ self.dismissSyncReminder = self:addNotification({
text = message, text = message,
timeout = 120, timeout = 120,
@@ -500,24 +519,39 @@ function App:sendSyncReminder(message: string)
self.dismissSyncReminder = nil self.dismissSyncReminder = nil
end, end,
actions = { actions = {
Connect = { Connect = if connectIndex
text = "Connect", then {
style = "Solid", text = "Connect",
layoutOrder = 1, style = "Solid",
onClick = function() layoutOrder = connectIndex,
self:startSession() onClick = function()
end, self:startSession()
}, end,
Dismiss = { }
text = "Dismiss", else nil,
style = "Bordered", Forget = if forgetIndex
layoutOrder = 2, then {
onClick = function() text = "Forget",
-- If the user dismisses the reminder, style = "Bordered",
-- then we don't need to remind them again layoutOrder = forgetIndex,
self:stopSyncReminderPolling() onClick = function()
end, -- The user doesn't want to be reminded again about this sync
}, self:forgetPriorSyncInfo()
end,
}
else nil,
Dismiss = if dismissIndex
then {
text = "Dismiss",
style = "Bordered",
layoutOrder = dismissIndex,
onClick = function()
-- If the user dismisses the reminder,
-- then we don't need to remind them again
self:stopSyncReminderPolling()
end,
}
else nil,
}, },
}) })
end end

View File

@@ -54,6 +54,10 @@ local function trueEquals(a, b): boolean
end end
return true return true
-- For NaN, check if both values are not equal to themselves
elseif a ~= a and b ~= b then
return true
-- For numbers, compare with epsilon of 0.0001 to avoid floating point inequality -- For numbers, compare with epsilon of 0.0001 to avoid floating point inequality
elseif typeA == "number" and typeB == "number" then elseif typeA == "number" and typeB == "number" then
return fuzzyEq(a, b, 0.0001) return fuzzyEq(a, b, 0.0001)

View File

@@ -41,14 +41,41 @@ function reifyInstanceInner(unappliedPatch, deferredRefs, instanceMap, virtualIn
invariant("Cannot reify an instance not present in virtualInstances\nID: {}", id) invariant("Cannot reify an instance not present in virtualInstances\nID: {}", id)
end end
-- Instance.new can fail if we're passing in something that can't be -- Before creating a new instance, check if the parent already has an
-- created, like a service, something enabled with a feature flag, or -- untracked child with the same Name and ClassName. This enables "late
-- something that requires higher security than we have. -- adoption" of instances that exist in Studio but weren't in the initial
local createSuccess, instance = pcall(Instance.new, virtualInstance.ClassName) -- Rojo tree (e.g., when using --git-since filtering). Without this,
-- newly acknowledged files would create duplicate instances.
local adoptedExisting = false
local instance = nil
if not createSuccess then for _, child in ipairs(parentInstance:GetChildren()) do
addAllToPatch(unappliedPatch, virtualInstances, id) local accessSuccess, name, className = pcall(function()
return return child.Name, child.ClassName
end)
if accessSuccess
and name == virtualInstance.Name
and className == virtualInstance.ClassName
and instanceMap.fromInstances[child] == nil
then
instance = child
adoptedExisting = true
break
end
end
if not adoptedExisting then
-- Instance.new can fail if we're passing in something that can't be
-- created, like a service, something enabled with a feature flag, or
-- something that requires higher security than we have.
local createSuccess
createSuccess, instance = pcall(Instance.new, virtualInstance.ClassName)
if not createSuccess then
addAllToPatch(unappliedPatch, virtualInstances, id)
return
end
end end
-- TODO: Can this fail? Previous versions of Rojo guarded against this, but -- TODO: Can this fail? Previous versions of Rojo guarded against this, but
@@ -96,7 +123,9 @@ function reifyInstanceInner(unappliedPatch, deferredRefs, instanceMap, virtualIn
reifyInstanceInner(unappliedPatch, deferredRefs, instanceMap, virtualInstances, childId, instance) reifyInstanceInner(unappliedPatch, deferredRefs, instanceMap, virtualInstances, childId, instance)
end end
instance.Parent = parentInstance if not adoptedExisting then
instance.Parent = parentInstance
end
instanceMap:insert(id, instance) instanceMap:insert(id, instance)
end end

View File

@@ -1,16 +0,0 @@
---
source: tests/tests/build.rs
expression: contents
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">json_model_legacy_name</string>
</Properties>
<Item class="Folder" referent="1">
<Properties>
<string name="Name">Expected Name</string>
</Properties>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,23 @@
---
source: tests/tests/build.rs
assertion_line: 109
expression: contents
---
<roblox version="4">
<Item class="DataModel" referent="0">
<Properties>
<string name="Name">model_json_name_input</string>
</Properties>
<Item class="Workspace" referent="1">
<Properties>
<string name="Name">Workspace</string>
<bool name="NeedsPivotMigration">false</bool>
</Properties>
<Item class="StringValue" referent="2">
<Properties>
<string name="Name">/Bar</string>
</Properties>
</Item>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,20 @@
---
source: tests/tests/build.rs
assertion_line: 108
expression: contents
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">slugified_name_roundtrip</string>
</Properties>
<Item class="Script" referent="1">
<Properties>
<string name="Name">/Script</string>
<token name="RunContext">0</token>
<string name="Source"><![CDATA[print("Hello world!")
]]></string>
</Properties>
</Item>
</Item>
</roblox>

View File

@@ -1,6 +0,0 @@
{
"name": "json_model_legacy_name",
"tree": {
"$path": "folder"
}
}

View File

@@ -1,4 +0,0 @@
{
"Name": "Overridden Name",
"ClassName": "Folder"
}

View File

@@ -0,0 +1,11 @@
{
"name": "model_json_name_input",
"tree": {
"$className": "DataModel",
"Workspace": {
"$className": "Workspace",
"$path": "src"
}
}
}

View File

@@ -0,0 +1,5 @@
{
"name": "/Bar",
"className": "StringValue"
}

View File

@@ -0,0 +1,4 @@
{
"name": "/Script"
}

View File

@@ -0,0 +1,2 @@
print("Hello world!")

View File

@@ -0,0 +1,6 @@
{
"name": "slugified_name_roundtrip",
"tree": {
"$path": "src"
}
}

View File

@@ -0,0 +1,3 @@
{
"name": "/Script"
}

View File

@@ -0,0 +1 @@
print("Hello world!")

View File

@@ -0,0 +1,6 @@
---
source: tests/rojo_test/syncback_util.rs
assertion_line: 101
expression: "String::from_utf8_lossy(&output.stdout)"
---

View File

@@ -0,0 +1,13 @@
---
source: tests/rojo_test/syncback_util.rs
assertion_line: 101
expression: "String::from_utf8_lossy(&output.stdout)"
---
Writing default.project.json
Writing src/Camera.rbxm
Writing src/Terrain.rbxm
Writing src/_Folder/init.meta.json
Writing src/_Script.meta.json
Writing src/_Script.server.luau
Writing src
Writing src/_Folder

View File

@@ -0,0 +1,9 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/foo.model.json
---
{
"name": "/Bar",
"className": "StringValue"
}

View File

@@ -0,0 +1,8 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Folder.model.json
---
{
"className": "Folder"
}

View File

@@ -0,0 +1,8 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Folder/init.meta.json
---
{
"name": "/Folder"
}

View File

@@ -0,0 +1,8 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Script.meta.json
---
{
"name": "/Script"
}

View File

@@ -0,0 +1,6 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Script.server.luau
---
print("Hello world!")

View File

@@ -0,0 +1,8 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Script/init.meta.json
---
{
"name": "/Script"
}

View File

@@ -0,0 +1,6 @@
---
source: tests/tests/syncback.rs
assertion_line: 31
expression: src/_Script/init.server.luau
---
print("Hello world!")

View File

@@ -0,0 +1,11 @@
{
"name": "model_json_name",
"tree": {
"$className": "DataModel",
"Workspace": {
"$className": "Workspace",
"$path": "src"
}
}
}

View File

@@ -0,0 +1,5 @@
{
"name": "/Bar",
"className": "StringValue"
}

Binary file not shown.

View File

@@ -0,0 +1,10 @@
{
"name": "slugified_name",
"tree": {
"$className": "DataModel",
"Workspace": {
"$className": "Workspace",
"$path": "src"
}
}
}

Binary file not shown.

View File

@@ -1,12 +1,12 @@
use std::{
fs,
sync::{Arc, Mutex},
};
use crossbeam_channel::{select, Receiver, RecvError, Sender}; use crossbeam_channel::{select, Receiver, RecvError, Sender};
use jod_thread::JoinHandle; use jod_thread::JoinHandle;
use memofs::{IoResultExt, Vfs, VfsEvent}; use memofs::{IoResultExt, Vfs, VfsEvent};
use rbx_dom_weak::types::{Ref, Variant}; use rbx_dom_weak::types::{Ref, Variant};
use std::path::PathBuf;
use std::{
fs,
sync::{Arc, Mutex},
};
use crate::{ use crate::{
git::SharedGitFilter, git::SharedGitFilter,
@@ -124,6 +124,49 @@ struct JobThreadContext {
} }
impl JobThreadContext { impl JobThreadContext {
/// Computes and applies patches to the DOM for a given file path.
///
/// This function finds the nearest ancestor to the given path that has associated instances
/// in the tree.
/// It then computes and applies changes for each affected instance ID and
/// returns a vector of applied patch sets.
fn apply_patches(&self, path: PathBuf) -> Vec<AppliedPatchSet> {
let mut tree = self.tree.lock().unwrap();
let mut applied_patches = Vec::new();
// Find the nearest ancestor to this path that has
// associated instances in the tree. This helps make sure
// that we handle additions correctly, especially if we
// receive events for descendants of a large tree being
// created all at once.
let mut current_path = path.as_path();
let affected_ids = loop {
let ids = tree.get_ids_at_path(current_path);
log::trace!("Path {} affects IDs {:?}", current_path.display(), ids);
if !ids.is_empty() {
break ids.to_vec();
}
log::trace!("Trying parent path...");
match current_path.parent() {
Some(parent) => current_path = parent,
None => break Vec::new(),
}
};
for id in affected_ids {
if let Some(patch) = compute_and_apply_changes(&mut tree, &self.vfs, id) {
if !patch.is_empty() {
applied_patches.push(patch);
}
}
}
applied_patches
}
fn handle_vfs_event(&self, event: VfsEvent) { fn handle_vfs_event(&self, event: VfsEvent) {
log::trace!("Vfs event: {:?}", event); log::trace!("Vfs event: {:?}", event);
@@ -143,54 +186,16 @@ impl JobThreadContext {
// For a given VFS event, we might have many changes to different parts // For a given VFS event, we might have many changes to different parts
// of the tree. Calculate and apply all of these changes. // of the tree. Calculate and apply all of these changes.
let applied_patches = match event { let applied_patches = match event {
VfsEvent::Create(path) | VfsEvent::Remove(path) | VfsEvent::Write(path) => { VfsEvent::Create(path) | VfsEvent::Write(path) => {
let mut tree = self.tree.lock().unwrap(); self.apply_patches(self.vfs.canonicalize(&path).unwrap())
let mut applied_patches = Vec::new(); }
VfsEvent::Remove(path) => {
// Find the nearest ancestor to this path that has // MemoFS does not track parent removals yet, so we can canonicalize
// associated instances in the tree. This helps make sure // the parent path safely and then append the removed path's file name.
// that we handle additions correctly, especially if we let parent = path.parent().unwrap();
// receive events for descendants of a large tree being let file_name = path.file_name().unwrap();
// created all at once. let parent_normalized = self.vfs.canonicalize(parent).unwrap();
let mut current_path = path.as_path(); self.apply_patches(parent_normalized.join(file_name))
let affected_ids = loop {
let ids = tree.get_ids_at_path(current_path);
log::trace!("Path {} affects IDs {:?}", current_path.display(), ids);
if !ids.is_empty() {
break ids.to_vec();
}
log::trace!("Trying parent path...");
match current_path.parent() {
Some(parent) => current_path = parent,
None => break Vec::new(),
}
};
if affected_ids.is_empty() {
log::debug!(
"No instances found for path {} or any of its ancestors",
path.display()
);
} else {
log::debug!(
"Found {} affected instances for path {}",
affected_ids.len(),
path.display()
);
}
for id in affected_ids {
if let Some(patch) = compute_and_apply_changes(&mut tree, &self.vfs, id) {
if !patch.is_empty() {
applied_patches.push(patch);
}
}
}
applied_patches
} }
_ => { _ => {
log::warn!("Unhandled VFS event: {:?}", event); log::warn!("Unhandled VFS event: {:?}", event);

View File

@@ -0,0 +1,35 @@
---
source: src/cli/sourcemap.rs
expression: sourcemap_contents
---
{
"name": "default",
"className": "DataModel",
"filePaths": "[...1 path omitted...]",
"children": [
{
"name": "ReplicatedStorage",
"className": "ReplicatedStorage",
"children": [
{
"name": "Project",
"className": "ModuleScript",
"filePaths": "[...1 path omitted...]",
"children": [
{
"name": "Module",
"className": "Folder",
"children": [
{
"name": "module",
"className": "ModuleScript",
"filePaths": "[...1 path omitted...]"
}
]
}
]
}
]
}
]
}

View File

@@ -0,0 +1,41 @@
---
source: src/cli/sourcemap.rs
expression: sourcemap_contents
---
{
"name": "default",
"className": "DataModel",
"filePaths": [
"default.project.json"
],
"children": [
{
"name": "ReplicatedStorage",
"className": "ReplicatedStorage",
"children": [
{
"name": "Project",
"className": "ModuleScript",
"filePaths": [
"src/init.luau"
],
"children": [
{
"name": "Module",
"className": "Folder",
"children": [
{
"name": "module",
"className": "ModuleScript",
"filePaths": [
"../module/module.luau"
]
}
]
}
]
}
]
}
]
}

View File

@@ -10,7 +10,7 @@ use fs_err::File;
use memofs::Vfs; use memofs::Vfs;
use rayon::prelude::*; use rayon::prelude::*;
use rbx_dom_weak::{types::Ref, Ustr}; use rbx_dom_weak::{types::Ref, Ustr};
use serde::Serialize; use serde::{Deserialize, Serialize};
use tokio::runtime::Runtime; use tokio::runtime::Runtime;
use crate::{ use crate::{
@@ -24,19 +24,20 @@ const PATH_STRIP_FAILED_ERR: &str = "Failed to create relative paths for project
const ABSOLUTE_PATH_FAILED_ERR: &str = "Failed to turn relative path into absolute path!"; const ABSOLUTE_PATH_FAILED_ERR: &str = "Failed to turn relative path into absolute path!";
/// Representation of a node in the generated sourcemap tree. /// Representation of a node in the generated sourcemap tree.
#[derive(Serialize)] #[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct SourcemapNode<'a> { struct SourcemapNode<'a> {
name: &'a str, name: &'a str,
class_name: Ustr, class_name: Ustr,
#[serde( #[serde(
default,
skip_serializing_if = "Vec::is_empty", skip_serializing_if = "Vec::is_empty",
serialize_with = "crate::path_serializer::serialize_vec_absolute" serialize_with = "crate::path_serializer::serialize_vec_absolute"
)] )]
file_paths: Vec<Cow<'a, Path>>, file_paths: Vec<Cow<'a, Path>>,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default, skip_serializing_if = "Vec::is_empty")]
children: Vec<SourcemapNode<'a>>, children: Vec<SourcemapNode<'a>>,
} }
@@ -70,12 +71,13 @@ pub struct SourcemapCommand {
impl SourcemapCommand { impl SourcemapCommand {
pub fn run(self) -> anyhow::Result<()> { pub fn run(self) -> anyhow::Result<()> {
let project_path = resolve_path(&self.project); let project_path = fs_err::canonicalize(resolve_path(&self.project))?;
log::trace!("Constructing in-memory filesystem"); log::trace!("Constructing filesystem with StdBackend");
let vfs = Vfs::new_default(); let vfs = Vfs::new_default();
vfs.set_watch_enabled(self.watch); vfs.set_watch_enabled(self.watch);
log::trace!("Setting up session for sourcemap generation");
let session = ServeSession::new(vfs, project_path, None)?; let session = ServeSession::new(vfs, project_path, None)?;
let mut cursor = session.message_queue().cursor(); let mut cursor = session.message_queue().cursor();
@@ -87,14 +89,17 @@ impl SourcemapCommand {
// Pre-build a rayon threadpool with a low number of threads to avoid // Pre-build a rayon threadpool with a low number of threads to avoid
// dynamic creation overhead on systems with a high number of cpus. // dynamic creation overhead on systems with a high number of cpus.
log::trace!("Setting rayon global threadpool");
rayon::ThreadPoolBuilder::new() rayon::ThreadPoolBuilder::new()
.num_threads(num_cpus::get().min(6)) .num_threads(num_cpus::get().min(6))
.build_global() .build_global()
.unwrap(); .ok();
log::trace!("Writing initial sourcemap");
write_sourcemap(&session, self.output.as_deref(), filter, self.absolute)?; write_sourcemap(&session, self.output.as_deref(), filter, self.absolute)?;
if self.watch { if self.watch {
log::trace!("Setting up runtime for watch mode");
let rt = Runtime::new().unwrap(); let rt = Runtime::new().unwrap();
loop { loop {
@@ -208,7 +213,7 @@ fn recurse_create_node<'a>(
} else { } else {
for val in file_paths { for val in file_paths {
output_file_paths.push(Cow::from( output_file_paths.push(Cow::from(
val.strip_prefix(project_dir).expect(PATH_STRIP_FAILED_ERR), pathdiff::diff_paths(val, project_dir).expect(PATH_STRIP_FAILED_ERR),
)); ));
} }
}; };
@@ -250,3 +255,80 @@ fn write_sourcemap(
Ok(()) Ok(())
} }
#[cfg(test)]
mod test {
use crate::cli::sourcemap::SourcemapNode;
use crate::cli::SourcemapCommand;
use insta::internals::Content;
use std::path::Path;
#[test]
fn maps_relative_paths() {
let sourcemap_dir = tempfile::tempdir().unwrap();
let sourcemap_output = sourcemap_dir.path().join("sourcemap.json");
let project_path = fs_err::canonicalize(
Path::new(env!("CARGO_MANIFEST_DIR"))
.join("test-projects")
.join("relative_paths")
.join("project"),
)
.unwrap();
let sourcemap_command = SourcemapCommand {
project: project_path,
output: Some(sourcemap_output.clone()),
include_non_scripts: false,
watch: false,
absolute: false,
};
assert!(sourcemap_command.run().is_ok());
let raw_sourcemap_contents = fs_err::read_to_string(sourcemap_output.as_path()).unwrap();
let sourcemap_contents =
serde_json::from_str::<SourcemapNode>(&raw_sourcemap_contents).unwrap();
insta::assert_json_snapshot!(sourcemap_contents);
}
#[test]
fn maps_absolute_paths() {
let sourcemap_dir = tempfile::tempdir().unwrap();
let sourcemap_output = sourcemap_dir.path().join("sourcemap.json");
let project_path = fs_err::canonicalize(
Path::new(env!("CARGO_MANIFEST_DIR"))
.join("test-projects")
.join("relative_paths")
.join("project"),
)
.unwrap();
let sourcemap_command = SourcemapCommand {
project: project_path,
output: Some(sourcemap_output.clone()),
include_non_scripts: false,
watch: false,
absolute: true,
};
assert!(sourcemap_command.run().is_ok());
let raw_sourcemap_contents = fs_err::read_to_string(sourcemap_output.as_path()).unwrap();
let sourcemap_contents =
serde_json::from_str::<SourcemapNode>(&raw_sourcemap_contents).unwrap();
insta::assert_json_snapshot!(sourcemap_contents, {
".**.filePaths" => insta::dynamic_redaction(|mut value, _path| {
let mut paths_count = 0;
match value {
Content::Seq(ref mut vec) => {
for path in vec.iter().map(|i| i.as_str().unwrap()) {
assert_eq!(fs_err::canonicalize(path).is_ok(), true, "path was not valid");
assert_eq!(Path::new(path).is_absolute(), true, "path was not absolute");
paths_count += 1;
}
}
_ => panic!("Expected filePaths to be a sequence"),
}
format!("[...{} path{} omitted...]", paths_count, if paths_count != 1 { "s" } else { "" } )
})
});
}
}

View File

@@ -71,6 +71,12 @@ pub struct InstanceMetadata {
/// A schema provided via a JSON file, if one exists. Will be `None` for /// A schema provided via a JSON file, if one exists. Will be `None` for
/// all non-JSON middleware. /// all non-JSON middleware.
pub schema: Option<String>, pub schema: Option<String>,
/// A custom name specified via meta.json or model.json files. If present,
/// this name will be used for the instance while the filesystem name will
/// be slugified to remove illegal characters.
#[serde(skip_serializing_if = "Option::is_none")]
pub specified_name: Option<String>,
} }
impl InstanceMetadata { impl InstanceMetadata {
@@ -83,6 +89,7 @@ impl InstanceMetadata {
specified_id: None, specified_id: None,
middleware: None, middleware: None,
schema: None, schema: None,
specified_name: None,
} }
} }
@@ -131,6 +138,13 @@ impl InstanceMetadata {
pub fn schema(self, schema: Option<String>) -> Self { pub fn schema(self, schema: Option<String>) -> Self {
Self { schema, ..self } Self { schema, ..self }
} }
pub fn specified_name(self, specified_name: Option<String>) -> Self {
Self {
specified_name,
..self
}
}
} }
impl Default for InstanceMetadata { impl Default for InstanceMetadata {

View File

@@ -8,7 +8,7 @@ use rbx_dom_weak::{
ustr, HashMapExt as _, UstrMap, UstrSet, ustr, HashMapExt as _, UstrMap, UstrSet,
}; };
use crate::{RojoRef, REF_POINTER_ATTRIBUTE_PREFIX}; use crate::{variant_eq::variant_eq, RojoRef, REF_POINTER_ATTRIBUTE_PREFIX};
use super::{ use super::{
patch::{PatchAdd, PatchSet, PatchUpdate}, patch::{PatchAdd, PatchSet, PatchUpdate},
@@ -127,7 +127,7 @@ fn compute_property_patches(
match instance.properties().get(&name) { match instance.properties().get(&name) {
Some(instance_value) => { Some(instance_value) => {
if &snapshot_value != instance_value { if !variant_eq(&snapshot_value, instance_value) {
changed_properties.insert(name, Some(snapshot_value)); changed_properties.insert(name, Some(snapshot_value));
} }
} }

View File

@@ -42,7 +42,7 @@ pub fn snapshot_csv(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]), .relevant_paths(vec![vfs.canonicalize(path)?]),
); );
AdjacentMetadata::read_and_apply_all(vfs, path, name, &mut snapshot)?; AdjacentMetadata::read_and_apply_all(vfs, path, name, &mut snapshot)?;

View File

@@ -62,18 +62,19 @@ pub fn snapshot_dir_no_meta(
} }
} }
let normalized_path = vfs.canonicalize(path)?;
let relevant_paths = vec![ let relevant_paths = vec![
path.to_path_buf(), normalized_path.clone(),
// TODO: We shouldn't need to know about Lua existing in this // TODO: We shouldn't need to know about Lua existing in this
// middleware. Should we figure out a way for that function to add // middleware. Should we figure out a way for that function to add
// relevant paths to this middleware? // relevant paths to this middleware?
path.join("init.lua"), normalized_path.join("init.lua"),
path.join("init.luau"), normalized_path.join("init.luau"),
path.join("init.server.lua"), normalized_path.join("init.server.lua"),
path.join("init.server.luau"), normalized_path.join("init.server.luau"),
path.join("init.client.lua"), normalized_path.join("init.client.lua"),
path.join("init.client.luau"), normalized_path.join("init.client.luau"),
path.join("init.csv"), normalized_path.join("init.csv"),
]; ];
let snapshot = InstanceSnapshot::new() let snapshot = InstanceSnapshot::new()

View File

@@ -32,7 +32,7 @@ pub fn snapshot_json(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -35,20 +35,14 @@ pub fn snapshot_json_model(
format!("File is not a valid JSON model: {}", path.display()) format!("File is not a valid JSON model: {}", path.display())
})?; })?;
if let Some(top_level_name) = &instance.name { // If the JSON has a name property, preserve it in metadata for syncback
let new_name = format!("{}.model.json", top_level_name); let specified_name = instance.name.clone();
log::warn!( // Use the name from JSON if present, otherwise fall back to filename-derived name
"Model at path {} had a top-level Name field. \ if instance.name.is_none() {
This field has been ignored since Rojo 6.0.\n\ instance.name = Some(name.to_owned());
Consider removing this field and renaming the file to {}.",
new_name,
path.display()
);
} }
instance.name = Some(name.to_owned());
let id = instance.id.take().map(RojoRef::new); let id = instance.id.take().map(RojoRef::new);
let schema = instance.schema.take(); let schema = instance.schema.take();
@@ -59,10 +53,11 @@ pub fn snapshot_json_model(
snapshot.metadata = snapshot snapshot.metadata = snapshot
.metadata .metadata
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context) .context(context)
.specified_id(id) .specified_id(id)
.schema(schema); .schema(schema)
.specified_name(specified_name);
Ok(Some(snapshot)) Ok(Some(snapshot))
} }
@@ -81,6 +76,7 @@ pub fn syncback_json_model<'sync>(
// schemas will ever exist in one project for it to matter, but it // schemas will ever exist in one project for it to matter, but it
// could have a performance cost. // could have a performance cost.
model.schema = old_inst.metadata().schema.clone(); model.schema = old_inst.metadata().schema.clone();
model.name = old_inst.metadata().specified_name.clone();
} }
Ok(SyncbackReturn { Ok(SyncbackReturn {

View File

@@ -88,7 +88,7 @@ pub fn snapshot_lua(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );
@@ -158,8 +158,16 @@ pub fn syncback_lua<'sync>(
if !meta.is_empty() { if !meta.is_empty() {
let parent_location = snapshot.path.parent_err()?; let parent_location = snapshot.path.parent_err()?;
let instance_name = &snapshot.new_inst().name;
let slugified;
let meta_name = if crate::syncback::validate_file_name(instance_name).is_err() {
slugified = crate::syncback::slugify_name(instance_name);
&slugified
} else {
instance_name
};
fs_snapshot.add_file( fs_snapshot.add_file(
parent_location.join(format!("{}.meta.json", new_inst.name)), parent_location.join(format!("{}.meta.json", meta_name)),
serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?, serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?,
); );
} }

View File

@@ -10,7 +10,10 @@ use rbx_dom_weak::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, syncback::SyncbackSnapshot, json,
resolution::UnresolvedValue,
snapshot::InstanceSnapshot,
syncback::{validate_file_name, SyncbackSnapshot},
RojoRef, RojoRef,
}; };
@@ -36,6 +39,9 @@ pub struct AdjacentMetadata {
#[serde(default, skip_serializing_if = "IndexMap::is_empty")] #[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub attributes: IndexMap<String, UnresolvedValue>, pub attributes: IndexMap<String, UnresolvedValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(skip)] #[serde(skip)]
pub path: PathBuf, pub path: PathBuf,
} }
@@ -144,6 +150,24 @@ impl AdjacentMetadata {
} }
} }
let name = snapshot
.old_inst()
.and_then(|inst| inst.metadata().specified_name.clone())
.or_else(|| {
// If this is a new instance and its name is invalid for the filesystem,
// we need to specify the name in meta.json so it can be preserved
if snapshot.old_inst().is_none() {
let instance_name = &snapshot.new_inst().name;
if validate_file_name(instance_name).is_err() {
Some(instance_name.clone())
} else {
None
}
} else {
None
}
});
Ok(Some(Self { Ok(Some(Self {
ignore_unknown_instances: if ignore_unknown_instances { ignore_unknown_instances: if ignore_unknown_instances {
Some(true) Some(true)
@@ -155,6 +179,7 @@ impl AdjacentMetadata {
path, path,
id: None, id: None,
schema, schema,
name,
})) }))
} }
@@ -213,11 +238,26 @@ impl AdjacentMetadata {
Ok(()) Ok(())
} }
fn apply_name(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
if self.name.is_some() && snapshot.metadata.specified_name.is_some() {
anyhow::bail!(
"cannot specify a name using {} (instance has a name from somewhere else)",
self.path.display()
);
}
if let Some(name) = &self.name {
snapshot.name = name.clone().into();
}
snapshot.metadata.specified_name = self.name.take();
Ok(())
}
pub fn apply_all(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> { pub fn apply_all(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
self.apply_ignore_unknown_instances(snapshot); self.apply_ignore_unknown_instances(snapshot);
self.apply_properties(snapshot)?; self.apply_properties(snapshot)?;
self.apply_id(snapshot)?; self.apply_id(snapshot)?;
self.apply_schema(snapshot)?; self.apply_schema(snapshot)?;
self.apply_name(snapshot)?;
Ok(()) Ok(())
} }
@@ -226,11 +266,13 @@ impl AdjacentMetadata {
/// ///
/// - The number of properties and attributes is 0 /// - The number of properties and attributes is 0
/// - `ignore_unknown_instances` is None /// - `ignore_unknown_instances` is None
/// - `name` is None
#[inline] #[inline]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.attributes.is_empty() self.attributes.is_empty()
&& self.properties.is_empty() && self.properties.is_empty()
&& self.ignore_unknown_instances.is_none() && self.ignore_unknown_instances.is_none()
&& self.name.is_none()
} }
// TODO: Add method to allow selectively applying parts of metadata and // TODO: Add method to allow selectively applying parts of metadata and
@@ -262,6 +304,9 @@ pub struct DirectoryMetadata {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub class_name: Option<Ustr>, pub class_name: Option<Ustr>,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(skip)] #[serde(skip)]
pub path: PathBuf, pub path: PathBuf,
} }
@@ -372,6 +417,24 @@ impl DirectoryMetadata {
} }
} }
let name = snapshot
.old_inst()
.and_then(|inst| inst.metadata().specified_name.clone())
.or_else(|| {
// If this is a new instance and its name is invalid for the filesystem,
// we need to specify the name in meta.json so it can be preserved
if snapshot.old_inst().is_none() {
let instance_name = &snapshot.new_inst().name;
if validate_file_name(instance_name).is_err() {
Some(instance_name.clone())
} else {
None
}
} else {
None
}
});
Ok(Some(Self { Ok(Some(Self {
ignore_unknown_instances: if ignore_unknown_instances { ignore_unknown_instances: if ignore_unknown_instances {
Some(true) Some(true)
@@ -384,6 +447,7 @@ impl DirectoryMetadata {
path, path,
id: None, id: None,
schema, schema,
name,
})) }))
} }
@@ -393,6 +457,7 @@ impl DirectoryMetadata {
self.apply_properties(snapshot)?; self.apply_properties(snapshot)?;
self.apply_id(snapshot)?; self.apply_id(snapshot)?;
self.apply_schema(snapshot)?; self.apply_schema(snapshot)?;
self.apply_name(snapshot)?;
Ok(()) Ok(())
} }
@@ -464,17 +529,33 @@ impl DirectoryMetadata {
snapshot.metadata.schema = self.schema.take(); snapshot.metadata.schema = self.schema.take();
Ok(()) Ok(())
} }
fn apply_name(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
if self.name.is_some() && snapshot.metadata.specified_name.is_some() {
anyhow::bail!(
"cannot specify a name using {} (instance has a name from somewhere else)",
self.path.display()
);
}
if let Some(name) = &self.name {
snapshot.name = name.clone().into();
}
snapshot.metadata.specified_name = self.name.take();
Ok(())
}
/// Returns whether the metadata is 'empty', meaning it doesn't have anything /// Returns whether the metadata is 'empty', meaning it doesn't have anything
/// worth persisting in it. Specifically: /// worth persisting in it. Specifically:
/// ///
/// - The number of properties and attributes is 0 /// - The number of properties and attributes is 0
/// - `ignore_unknown_instances` is None /// - `ignore_unknown_instances` is None
/// - `class_name` is either None or not Some("Folder") /// - `class_name` is either None or not Some("Folder")
/// - `name` is None
#[inline] #[inline]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.attributes.is_empty() self.attributes.is_empty()
&& self.properties.is_empty() && self.properties.is_empty()
&& self.ignore_unknown_instances.is_none() && self.ignore_unknown_instances.is_none()
&& self.name.is_none()
&& if let Some(class) = &self.class_name { && if let Some(class) = &self.class_name {
class == "Folder" class == "Folder"
} else { } else {

View File

@@ -83,6 +83,19 @@ pub fn snapshot_project(
// file being updated. // file being updated.
snapshot.metadata.relevant_paths.push(path.to_path_buf()); snapshot.metadata.relevant_paths.push(path.to_path_buf());
// When git filter is active, also register the project folder as a
// relevant path. This serves as a catch-all so that file changes
// not under any specific $path node can still walk up the directory
// tree and trigger a re-snapshot of the entire project.
if context.has_git_filter() {
if let Some(folder) = path.parent() {
let normalized = vfs
.canonicalize(folder)
.unwrap_or_else(|_| folder.to_path_buf());
snapshot.metadata.relevant_paths.push(normalized);
}
}
Ok(Some(snapshot)) Ok(Some(snapshot))
} }
None => Ok(None), None => Ok(None),
@@ -137,6 +150,26 @@ pub fn snapshot_project_node(
// Take the snapshot's metadata as-is, which will be mutated later // Take the snapshot's metadata as-is, which will be mutated later
// on. // on.
metadata = snapshot.metadata; metadata = snapshot.metadata;
} else if context.has_git_filter() {
// When the git filter is active and the $path was filtered out
// (no acknowledged files yet), we still need to register the path
// in relevant_paths. This allows the change processor to map file
// changes in this directory back to this project node instance,
// triggering a re-snapshot that will pick up newly modified files.
let normalized = vfs
.canonicalize(full_path.as_ref())
.unwrap_or_else(|_| full_path.to_path_buf());
metadata.relevant_paths.push(normalized);
// The VFS only sets up file watches via read() and read_dir(),
// not via metadata(). Since the git filter caused snapshot_from_vfs
// to return early (before read_dir was called), the VFS is not
// watching this path. We must read the directory here to ensure
// the VFS sets up a recursive watch, otherwise file change events
// will never fire and live sync won't detect modifications.
if full_path.is_dir() {
let _ = vfs.read_dir(&full_path);
}
} }
} }

View File

@@ -28,7 +28,7 @@ pub fn snapshot_rbxm(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -31,7 +31,7 @@ pub fn snapshot_rbxmx(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -31,7 +31,7 @@ pub fn snapshot_toml(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -28,7 +28,7 @@ pub fn snapshot_txt(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -37,7 +37,7 @@ pub fn snapshot_yaml(
.metadata( .metadata(
InstanceMetadata::new() InstanceMetadata::new()
.instigating_source(path) .instigating_source(path)
.relevant_paths(vec![path.to_path_buf()]) .relevant_paths(vec![vfs.canonicalize(path)?])
.context(context), .context(context),
); );

View File

@@ -8,11 +8,11 @@ use rbx_dom_weak::Instance;
use crate::{snapshot::InstanceWithMeta, snapshot_middleware::Middleware}; use crate::{snapshot::InstanceWithMeta, snapshot_middleware::Middleware};
pub fn name_for_inst<'old>( pub fn name_for_inst<'a>(
middleware: Middleware, middleware: Middleware,
new_inst: &Instance, new_inst: &'a Instance,
old_inst: Option<InstanceWithMeta<'old>>, old_inst: Option<InstanceWithMeta<'a>>,
) -> anyhow::Result<Cow<'old, str>> { ) -> anyhow::Result<Cow<'a, str>> {
if let Some(old_inst) = old_inst { if let Some(old_inst) = old_inst {
if let Some(source) = old_inst.metadata().relevant_paths.first() { if let Some(source) = old_inst.metadata().relevant_paths.first() {
source source
@@ -35,14 +35,24 @@ pub fn name_for_inst<'old>(
| Middleware::CsvDir | Middleware::CsvDir
| Middleware::ServerScriptDir | Middleware::ServerScriptDir
| Middleware::ClientScriptDir | Middleware::ClientScriptDir
| Middleware::ModuleScriptDir => Cow::Owned(new_inst.name.clone()), | Middleware::ModuleScriptDir => {
if validate_file_name(&new_inst.name).is_err() {
Cow::Owned(slugify_name(&new_inst.name))
} else {
Cow::Borrowed(&new_inst.name)
}
}
_ => { _ => {
let extension = extension_for_middleware(middleware); let extension = extension_for_middleware(middleware);
let name = &new_inst.name; let slugified;
validate_file_name(name).with_context(|| { let final_name = if validate_file_name(&new_inst.name).is_err() {
format!("name '{name}' is not legal to write to the file system") slugified = slugify_name(&new_inst.name);
})?; &slugified
Cow::Owned(format!("{name}.{extension}")) } else {
&new_inst.name
};
Cow::Owned(format!("{final_name}.{extension}"))
} }
}) })
} }
@@ -94,6 +104,39 @@ const INVALID_WINDOWS_NAMES: [&str; 22] = [
/// in a file's name. /// in a file's name.
const FORBIDDEN_CHARS: [char; 9] = ['<', '>', ':', '"', '/', '|', '?', '*', '\\']; const FORBIDDEN_CHARS: [char; 9] = ['<', '>', ':', '"', '/', '|', '?', '*', '\\'];
/// Slugifies a name by replacing forbidden characters with underscores
/// and ensuring the result is a valid file name
pub fn slugify_name(name: &str) -> String {
let mut result = String::with_capacity(name.len());
for ch in name.chars() {
if FORBIDDEN_CHARS.contains(&ch) {
result.push('_');
} else {
result.push(ch);
}
}
// Handle Windows reserved names by appending an underscore
let result_lower = result.to_lowercase();
for forbidden in INVALID_WINDOWS_NAMES {
if result_lower == forbidden.to_lowercase() {
result.push('_');
break;
}
}
while result.ends_with(' ') || result.ends_with('.') {
result.pop();
}
if result.is_empty() || result.chars().all(|c| c == '_') {
result = "instance".to_string();
}
result
}
/// Validates a provided file name to ensure it's allowed on the file system. An /// Validates a provided file name to ensure it's allowed on the file system. An
/// error is returned if the name isn't allowed, indicating why. /// error is returned if the name isn't allowed, indicating why.
/// This takes into account rules for Windows, MacOS, and Linux. /// This takes into account rules for Windows, MacOS, and Linux.

View File

@@ -28,7 +28,7 @@ use crate::{
Project, Project,
}; };
pub use file_names::{extension_for_middleware, name_for_inst, validate_file_name}; pub use file_names::{extension_for_middleware, name_for_inst, slugify_name, validate_file_name};
pub use fs_snapshot::FsSnapshot; pub use fs_snapshot::FsSnapshot;
pub use hash::*; pub use hash::*;
pub use property_filter::{filter_properties, filter_properties_preallocated}; pub use property_filter::{filter_properties, filter_properties_preallocated};
@@ -301,6 +301,7 @@ pub fn get_best_middleware(snapshot: &SyncbackSnapshot) -> Middleware {
static JSON_MODEL_CLASSES: OnceLock<HashSet<&str>> = OnceLock::new(); static JSON_MODEL_CLASSES: OnceLock<HashSet<&str>> = OnceLock::new();
let json_model_classes = JSON_MODEL_CLASSES.get_or_init(|| { let json_model_classes = JSON_MODEL_CLASSES.get_or_init(|| {
[ [
"Actor",
"Sound", "Sound",
"SoundGroup", "SoundGroup",
"Sky", "Sky",
@@ -318,6 +319,11 @@ pub fn get_best_middleware(snapshot: &SyncbackSnapshot) -> Middleware {
"ChatInputBarConfiguration", "ChatInputBarConfiguration",
"BubbleChatConfiguration", "BubbleChatConfiguration",
"ChannelTabsConfiguration", "ChannelTabsConfiguration",
"RemoteEvent",
"UnreliableRemoteEvent",
"RemoteFunction",
"BindableEvent",
"BindableFunction",
] ]
.into() .into()
}); });

View File

@@ -1,13 +1,7 @@
//! Defines Rojo's HTTP API, all under /api. These endpoints generally return //! Defines Rojo's HTTP API, all under /api. These endpoints generally return
//! JSON. //! JSON.
use std::{ use std::{collections::HashMap, fs, path::PathBuf, str::FromStr, sync::Arc};
collections::{HashMap, HashSet},
fs,
path::PathBuf,
str::FromStr,
sync::Arc,
};
use futures::{sink::SinkExt, stream::StreamExt}; use futures::{sink::SinkExt, stream::StreamExt};
use hyper::{body, Body, Method, Request, Response, StatusCode}; use hyper::{body, Body, Method, Request, Response, StatusCode};
@@ -19,7 +13,6 @@ use rbx_dom_weak::{
}; };
use crate::{ use crate::{
json,
serve_session::ServeSession, serve_session::ServeSession,
snapshot::{InstanceWithMeta, PatchSet, PatchUpdate}, snapshot::{InstanceWithMeta, PatchSet, PatchUpdate},
web::{ web::{
@@ -28,9 +21,11 @@ use crate::{
ServerInfoResponse, SocketPacket, SocketPacketBody, SocketPacketType, SubscribeMessage, ServerInfoResponse, SocketPacket, SocketPacketBody, SocketPacketType, SubscribeMessage,
WriteRequest, WriteResponse, PROTOCOL_VERSION, SERVER_VERSION, WriteRequest, WriteResponse, PROTOCOL_VERSION, SERVER_VERSION,
}, },
util::{json, json_ok}, util::{deserialize_msgpack, msgpack, msgpack_ok, serialize_msgpack},
},
web_api::{
InstanceUpdate, RefPatchRequest, RefPatchResponse, SerializeRequest, SerializeResponse,
}, },
web_api::{BufferEncode, InstanceUpdate, RefPatchResponse, SerializeResponse},
}; };
pub async fn call(serve_session: Arc<ServeSession>, mut request: Request<Body>) -> Response<Body> { pub async fn call(serve_session: Arc<ServeSession>, mut request: Request<Body>) -> Response<Body> {
@@ -45,7 +40,7 @@ pub async fn call(serve_session: Arc<ServeSession>, mut request: Request<Body>)
if is_upgrade_request(&request) { if is_upgrade_request(&request) {
service.handle_api_socket(&mut request).await service.handle_api_socket(&mut request).await
} else { } else {
json( msgpack(
ErrorResponse::bad_request( ErrorResponse::bad_request(
"/api/socket must be called as a websocket upgrade request", "/api/socket must be called as a websocket upgrade request",
), ),
@@ -53,19 +48,15 @@ pub async fn call(serve_session: Arc<ServeSession>, mut request: Request<Body>)
) )
} }
} }
(&Method::GET, path) if path.starts_with("/api/serialize/") => { (&Method::POST, "/api/serialize") => service.handle_api_serialize(request).await,
service.handle_api_serialize(request).await (&Method::POST, "/api/ref-patch") => service.handle_api_ref_patch(request).await,
}
(&Method::GET, path) if path.starts_with("/api/ref-patch/") => {
service.handle_api_ref_patch(request).await
}
(&Method::POST, path) if path.starts_with("/api/open/") => { (&Method::POST, path) if path.starts_with("/api/open/") => {
service.handle_api_open(request).await service.handle_api_open(request).await
} }
(&Method::POST, "/api/write") => service.handle_api_write(request).await, (&Method::POST, "/api/write") => service.handle_api_write(request).await,
(_method, path) => json( (_method, path) => msgpack(
ErrorResponse::not_found(format!("Route not found: {}", path)), ErrorResponse::not_found(format!("Route not found: {}", path)),
StatusCode::NOT_FOUND, StatusCode::NOT_FOUND,
), ),
@@ -86,7 +77,7 @@ impl ApiService {
let tree = self.serve_session.tree(); let tree = self.serve_session.tree();
let root_instance_id = tree.get_root_id(); let root_instance_id = tree.get_root_id();
json_ok(&ServerInfoResponse { msgpack_ok(&ServerInfoResponse {
server_version: SERVER_VERSION.to_owned(), server_version: SERVER_VERSION.to_owned(),
protocol_version: PROTOCOL_VERSION, protocol_version: PROTOCOL_VERSION,
session_id: self.serve_session.session_id(), session_id: self.serve_session.session_id(),
@@ -105,7 +96,7 @@ impl ApiService {
let input_cursor: u32 = match argument.parse() { let input_cursor: u32 = match argument.parse() {
Ok(v) => v, Ok(v) => v,
Err(err) => { Err(err) => {
return json( return msgpack(
ErrorResponse::bad_request(format!("Malformed message cursor: {}", err)), ErrorResponse::bad_request(format!("Malformed message cursor: {}", err)),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -116,7 +107,7 @@ impl ApiService {
let (response, websocket) = match upgrade(request, None) { let (response, websocket) = match upgrade(request, None) {
Ok(result) => result, Ok(result) => result,
Err(err) => { Err(err) => {
return json( return msgpack(
ErrorResponse::internal_error(format!("WebSocket upgrade failed: {}", err)), ErrorResponse::internal_error(format!("WebSocket upgrade failed: {}", err)),
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
); );
@@ -143,10 +134,10 @@ impl ApiService {
let body = body::to_bytes(request.into_body()).await.unwrap(); let body = body::to_bytes(request.into_body()).await.unwrap();
let request: WriteRequest = match json::from_slice(&body) { let request: WriteRequest = match deserialize_msgpack(&body) {
Ok(request) => request, Ok(request) => request,
Err(err) => { Err(err) => {
return json( return msgpack(
ErrorResponse::bad_request(format!("Invalid body: {}", err)), ErrorResponse::bad_request(format!("Invalid body: {}", err)),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -154,7 +145,7 @@ impl ApiService {
}; };
if request.session_id != session_id { if request.session_id != session_id {
return json( return msgpack(
ErrorResponse::bad_request("Wrong session ID"), ErrorResponse::bad_request("Wrong session ID"),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -180,7 +171,7 @@ impl ApiService {
}) })
.unwrap(); .unwrap();
json_ok(WriteResponse { session_id }) msgpack_ok(WriteResponse { session_id })
} }
async fn handle_api_read(&self, request: Request<Body>) -> Response<Body> { async fn handle_api_read(&self, request: Request<Body>) -> Response<Body> {
@@ -190,7 +181,7 @@ impl ApiService {
let requested_ids = match requested_ids { let requested_ids = match requested_ids {
Ok(ids) => ids, Ok(ids) => ids,
Err(_) => { Err(_) => {
return json( return msgpack(
ErrorResponse::bad_request("Malformed ID list"), ErrorResponse::bad_request("Malformed ID list"),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -214,7 +205,7 @@ impl ApiService {
} }
} }
json_ok(ReadResponse { msgpack_ok(ReadResponse {
session_id: self.serve_session.session_id(), session_id: self.serve_session.session_id(),
message_cursor, message_cursor,
instances, instances,
@@ -229,22 +220,30 @@ impl ApiService {
/// that correspond to the requested Instances. These values have their /// that correspond to the requested Instances. These values have their
/// `Value` property set to point to the requested Instance. /// `Value` property set to point to the requested Instance.
async fn handle_api_serialize(&self, request: Request<Body>) -> Response<Body> { async fn handle_api_serialize(&self, request: Request<Body>) -> Response<Body> {
let argument = &request.uri().path()["/api/serialize/".len()..]; let session_id = self.serve_session.session_id();
let requested_ids: Result<Vec<Ref>, _> = argument.split(',').map(Ref::from_str).collect(); let body = body::to_bytes(request.into_body()).await.unwrap();
let requested_ids = match requested_ids { let request: SerializeRequest = match deserialize_msgpack(&body) {
Ok(ids) => ids, Ok(request) => request,
Err(_) => { Err(err) => {
return json( return msgpack(
ErrorResponse::bad_request("Malformed ID list"), ErrorResponse::bad_request(format!("Invalid body: {}", err)),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
} }
}; };
if request.session_id != session_id {
return msgpack(
ErrorResponse::bad_request("Wrong session ID"),
StatusCode::BAD_REQUEST,
);
}
let mut response_dom = WeakDom::new(InstanceBuilder::new("Folder")); let mut response_dom = WeakDom::new(InstanceBuilder::new("Folder"));
let tree = self.serve_session.tree(); let tree = self.serve_session.tree();
for id in &requested_ids { for id in &request.ids {
if let Some(instance) = tree.get_instance(*id) { if let Some(instance) = tree.get_instance(*id) {
let clone = response_dom.insert( let clone = response_dom.insert(
Ref::none(), Ref::none(),
@@ -268,7 +267,7 @@ impl ApiService {
response_dom.transfer_within(child_ref, object_value); response_dom.transfer_within(child_ref, object_value);
} else { } else {
json( msgpack(
ErrorResponse::bad_request(format!("provided id {id} is not in the tree")), ErrorResponse::bad_request(format!("provided id {id} is not in the tree")),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -279,9 +278,9 @@ impl ApiService {
let mut source = Vec::new(); let mut source = Vec::new();
rbx_binary::to_writer(&mut source, &response_dom, &[response_dom.root_ref()]).unwrap(); rbx_binary::to_writer(&mut source, &response_dom, &[response_dom.root_ref()]).unwrap();
json_ok(SerializeResponse { msgpack_ok(SerializeResponse {
session_id: self.serve_session.session_id(), session_id: self.serve_session.session_id(),
model_contents: BufferEncode::new(source), model_contents: source,
}) })
} }
@@ -290,20 +289,26 @@ impl ApiService {
/// and referent properties need to be updated after the serialize /// and referent properties need to be updated after the serialize
/// endpoint is used. /// endpoint is used.
async fn handle_api_ref_patch(self, request: Request<Body>) -> Response<Body> { async fn handle_api_ref_patch(self, request: Request<Body>) -> Response<Body> {
let argument = &request.uri().path()["/api/ref-patch/".len()..]; let session_id = self.serve_session.session_id();
let requested_ids: Result<HashSet<Ref>, _> = let body = body::to_bytes(request.into_body()).await.unwrap();
argument.split(',').map(Ref::from_str).collect();
let requested_ids = match requested_ids { let request: RefPatchRequest = match deserialize_msgpack(&body) {
Ok(ids) => ids, Ok(request) => request,
Err(_) => { Err(err) => {
return json( return msgpack(
ErrorResponse::bad_request("Malformed ID list"), ErrorResponse::bad_request(format!("Invalid body: {}", err)),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
} }
}; };
if request.session_id != session_id {
return msgpack(
ErrorResponse::bad_request("Wrong session ID"),
StatusCode::BAD_REQUEST,
);
}
let mut instance_updates: HashMap<Ref, InstanceUpdate> = HashMap::new(); let mut instance_updates: HashMap<Ref, InstanceUpdate> = HashMap::new();
let tree = self.serve_session.tree(); let tree = self.serve_session.tree();
@@ -312,7 +317,7 @@ impl ApiService {
let Variant::Ref(prop_value) = prop_value else { let Variant::Ref(prop_value) = prop_value else {
continue; continue;
}; };
if let Some(target_id) = requested_ids.get(prop_value) { if let Some(target_id) = request.ids.get(prop_value) {
let instance_id = instance.id(); let instance_id = instance.id();
let update = let update =
instance_updates instance_updates
@@ -331,7 +336,7 @@ impl ApiService {
} }
} }
json_ok(RefPatchResponse { msgpack_ok(RefPatchResponse {
session_id: self.serve_session.session_id(), session_id: self.serve_session.session_id(),
patch: SubscribeMessage { patch: SubscribeMessage {
added: HashMap::new(), added: HashMap::new(),
@@ -347,7 +352,7 @@ impl ApiService {
let requested_id = match Ref::from_str(argument) { let requested_id = match Ref::from_str(argument) {
Ok(id) => id, Ok(id) => id,
Err(_) => { Err(_) => {
return json( return msgpack(
ErrorResponse::bad_request("Invalid instance ID"), ErrorResponse::bad_request("Invalid instance ID"),
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
); );
@@ -359,7 +364,7 @@ impl ApiService {
let instance = match tree.get_instance(requested_id) { let instance = match tree.get_instance(requested_id) {
Some(instance) => instance, Some(instance) => instance,
None => { None => {
return json( return msgpack(
ErrorResponse::bad_request("Instance not found"), ErrorResponse::bad_request("Instance not found"),
StatusCode::NOT_FOUND, StatusCode::NOT_FOUND,
); );
@@ -369,7 +374,7 @@ impl ApiService {
let script_path = match pick_script_path(instance) { let script_path = match pick_script_path(instance) {
Some(path) => path, Some(path) => path,
None => { None => {
return json( return msgpack(
ErrorResponse::bad_request( ErrorResponse::bad_request(
"No appropriate file could be found to open this script", "No appropriate file could be found to open this script",
), ),
@@ -382,7 +387,7 @@ impl ApiService {
Ok(()) => {} Ok(()) => {}
Err(error) => match error { Err(error) => match error {
OpenError::Io(io_error) => { OpenError::Io(io_error) => {
return json( return msgpack(
ErrorResponse::internal_error(format!( ErrorResponse::internal_error(format!(
"Attempting to open {} failed because of the following io error: {}", "Attempting to open {} failed because of the following io error: {}",
script_path.display(), script_path.display(),
@@ -396,7 +401,7 @@ impl ApiService {
status, status,
stderr, stderr,
} => { } => {
return json( return msgpack(
ErrorResponse::internal_error(format!( ErrorResponse::internal_error(format!(
r#"The command '{}' to open '{}' failed with the error code '{}'. r#"The command '{}' to open '{}' failed with the error code '{}'.
Error logs: Error logs:
@@ -412,7 +417,7 @@ impl ApiService {
}, },
}; };
json_ok(OpenResponse { msgpack_ok(OpenResponse {
session_id: self.serve_session.session_id(), session_id: self.serve_session.session_id(),
}) })
} }
@@ -476,7 +481,7 @@ async fn handle_websocket_subscription(
match result { match result {
Ok((new_cursor, messages)) => { Ok((new_cursor, messages)) => {
if !messages.is_empty() { if !messages.is_empty() {
let json_message = { let msgpack_message = {
let tree = tree_handle.lock().unwrap(); let tree = tree_handle.lock().unwrap();
let api_messages = messages let api_messages = messages
.into_iter() .into_iter()
@@ -492,12 +497,12 @@ async fn handle_websocket_subscription(
}), }),
}; };
serde_json::to_string(&response)? serialize_msgpack(response)?
}; };
log::debug!("Sending batch of messages over WebSocket subscription"); log::debug!("Sending batch of messages over WebSocket subscription");
if websocket.send(Message::Text(json_message)).await.is_err() { if websocket.send(Message::Binary(msgpack_message)).await.is_err() {
// Client disconnected // Client disconnected
log::debug!("WebSocket subscription closed by client"); log::debug!("WebSocket subscription closed by client");
break; break;

View File

@@ -238,35 +238,26 @@ pub struct OpenResponse {
pub session_id: SessionId, pub session_id: SessionId,
} }
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SerializeRequest {
pub session_id: SessionId,
pub ids: Vec<Ref>,
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SerializeResponse { pub struct SerializeResponse {
pub session_id: SessionId, pub session_id: SessionId,
pub model_contents: BufferEncode, #[serde(with = "serde_bytes")]
pub model_contents: Vec<u8>,
} }
/// Using this struct we can force Roblox to JSONDecode this as a buffer.
/// This is what Roblox's serde APIs use, so it saves a step in the plugin.
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct BufferEncode { #[serde(rename_all = "camelCase")]
m: (), pub struct RefPatchRequest {
t: Cow<'static, str>, pub session_id: SessionId,
base64: String, pub ids: HashSet<Ref>,
}
impl BufferEncode {
pub fn new(content: Vec<u8>) -> Self {
let base64 = data_encoding::BASE64.encode(&content);
Self {
m: (),
t: Cow::Borrowed("buffer"),
base64,
}
}
pub fn model(&self) -> &str {
&self.base64
}
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]

View File

@@ -1,8 +1,48 @@
use hyper::{header::CONTENT_TYPE, Body, Response, StatusCode}; use hyper::{header::CONTENT_TYPE, Body, Response, StatusCode};
use serde::Serialize; use serde::{Deserialize, Serialize};
pub fn json_ok<T: Serialize>(value: T) -> Response<Body> { pub fn msgpack_ok<T: Serialize>(value: T) -> Response<Body> {
json(value, StatusCode::OK) msgpack(value, StatusCode::OK)
}
pub fn msgpack<T: Serialize>(value: T, code: StatusCode) -> Response<Body> {
let mut serialized = Vec::new();
let mut serializer = rmp_serde::Serializer::new(&mut serialized)
.with_human_readable()
.with_struct_map();
if let Err(err) = value.serialize(&mut serializer) {
return Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.header(CONTENT_TYPE, "text/plain")
.body(Body::from(err.to_string()))
.unwrap();
};
Response::builder()
.status(code)
.header(CONTENT_TYPE, "application/msgpack")
.body(Body::from(serialized))
.unwrap()
}
pub fn serialize_msgpack<T: Serialize>(value: T) -> anyhow::Result<Vec<u8>> {
let mut serialized = Vec::new();
let mut serializer = rmp_serde::Serializer::new(&mut serialized)
.with_human_readable()
.with_struct_map();
value.serialize(&mut serializer)?;
Ok(serialized)
}
pub fn deserialize_msgpack<'a, T: Deserialize<'a>>(
input: &'a [u8],
) -> Result<T, rmp_serde::decode::Error> {
let mut deserializer = rmp_serde::Deserializer::new(input).with_human_readable();
T::deserialize(&mut deserializer)
} }
pub fn json<T: Serialize>(value: T, code: StatusCode) -> Response<Body> { pub fn json<T: Serialize>(value: T, code: StatusCode) -> Response<Body> {

View File

@@ -0,0 +1,14 @@
{
"name": "default",
"tree": {
"$className": "DataModel",
"ReplicatedStorage": {
"Project": {
"$path": "project/src",
"Module": {
"$path": "module"
}
}
}
}
}

View File

@@ -0,0 +1 @@
return nil

View File

@@ -0,0 +1,14 @@
{
"name": "default",
"tree": {
"$className": "DataModel",
"ReplicatedStorage": {
"Project": {
"$path": "src/",
"Module": {
"$path": "../module"
}
}
}
}
}

View File

@@ -0,0 +1 @@
return nil

View File

@@ -1,5 +1,4 @@
use std::{ use std::{
fmt::Write as _,
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
process::Command, process::Command,
@@ -11,10 +10,15 @@ use std::{
use hyper_tungstenite::tungstenite::{connect, Message}; use hyper_tungstenite::tungstenite::{connect, Message};
use rbx_dom_weak::types::Ref; use rbx_dom_weak::types::Ref;
use serde::{Deserialize, Serialize};
use tempfile::{tempdir, TempDir}; use tempfile::{tempdir, TempDir};
use librojo::web_api::{ use librojo::{
ReadResponse, SerializeResponse, ServerInfoResponse, SocketPacket, SocketPacketType, web_api::{
ReadResponse, SerializeRequest, SerializeResponse, ServerInfoResponse, SocketPacket,
SocketPacketType,
},
SessionId,
}; };
use rojo_insta_ext::RedactionMap; use rojo_insta_ext::RedactionMap;
@@ -158,22 +162,16 @@ impl TestServeSession {
pub fn get_api_rojo(&self) -> Result<ServerInfoResponse, reqwest::Error> { pub fn get_api_rojo(&self) -> Result<ServerInfoResponse, reqwest::Error> {
let url = format!("http://localhost:{}/api/rojo", self.port); let url = format!("http://localhost:{}/api/rojo", self.port);
let body = reqwest::blocking::get(url)?.text()?; let body = reqwest::blocking::get(url)?.bytes()?;
let value = jsonc_parser::parse_to_serde_value(&body, &Default::default()) Ok(deserialize_msgpack(&body).expect("Server returned malformed response"))
.expect("Failed to parse JSON")
.expect("No JSON value");
Ok(serde_json::from_value(value).expect("Server returned malformed response"))
} }
pub fn get_api_read(&self, id: Ref) -> Result<ReadResponse<'_>, reqwest::Error> { pub fn get_api_read(&self, id: Ref) -> Result<ReadResponse<'_>, reqwest::Error> {
let url = format!("http://localhost:{}/api/read/{}", self.port, id); let url = format!("http://localhost:{}/api/read/{}", self.port, id);
let body = reqwest::blocking::get(url)?.text()?; let body = reqwest::blocking::get(url)?.bytes()?;
let value = jsonc_parser::parse_to_serde_value(&body, &Default::default()) Ok(deserialize_msgpack(&body).expect("Server returned malformed response"))
.expect("Failed to parse JSON")
.expect("No JSON value");
Ok(serde_json::from_value(value).expect("Server returned malformed response"))
} }
pub fn get_api_socket_packet( pub fn get_api_socket_packet(
@@ -195,8 +193,8 @@ impl TestServeSession {
} }
match socket.read() { match socket.read() {
Ok(Message::Text(text)) => { Ok(Message::Binary(binary)) => {
let packet: SocketPacket = serde_json::from_str(&text)?; let packet: SocketPacket = deserialize_msgpack(&binary)?;
if packet.packet_type != packet_type { if packet.packet_type != packet_type {
continue; continue;
} }
@@ -209,7 +207,7 @@ impl TestServeSession {
return Err("WebSocket closed before receiving messages".into()); return Err("WebSocket closed before receiving messages".into());
} }
Ok(_) => { Ok(_) => {
// Ignore other message types (ping, pong, binary) // Ignore other message types (ping, pong, text)
continue; continue;
} }
Err(hyper_tungstenite::tungstenite::Error::Io(e)) Err(hyper_tungstenite::tungstenite::Error::Io(e))
@@ -226,19 +224,44 @@ impl TestServeSession {
} }
} }
pub fn get_api_serialize(&self, ids: &[Ref]) -> Result<SerializeResponse, reqwest::Error> { pub fn get_api_serialize(
let mut id_list = String::with_capacity(ids.len() * 33); &self,
for id in ids { ids: &[Ref],
write!(id_list, "{id},").unwrap(); session_id: SessionId,
} ) -> Result<SerializeResponse, reqwest::Error> {
id_list.pop(); let client = reqwest::blocking::Client::new();
let url = format!("http://localhost:{}/api/serialize", self.port);
let body = serialize_msgpack(&SerializeRequest {
session_id,
ids: ids.to_vec(),
})
.unwrap();
let url = format!("http://localhost:{}/api/serialize/{}", self.port, id_list); let body = client.post(url).body(body).send()?.bytes()?;
reqwest::blocking::get(url)?.json() Ok(deserialize_msgpack(&body).expect("Server returned malformed response"))
} }
} }
fn serialize_msgpack<T: Serialize>(value: T) -> Result<Vec<u8>, rmp_serde::encode::Error> {
let mut serialized = Vec::new();
let mut serializer = rmp_serde::Serializer::new(&mut serialized)
.with_human_readable()
.with_struct_map();
value.serialize(&mut serializer)?;
Ok(serialized)
}
fn deserialize_msgpack<'a, T: Deserialize<'a>>(
input: &'a [u8],
) -> Result<T, rmp_serde::decode::Error> {
let mut deserializer = rmp_serde::Deserializer::new(input).with_human_readable();
T::deserialize(&mut deserializer)
}
/// Probably-okay way to generate random enough port numbers for running the /// Probably-okay way to generate random enough port numbers for running the
/// Rojo live server. /// Rojo live server.
/// ///
@@ -256,11 +279,7 @@ fn get_port_number() -> usize {
/// Since the provided structure intentionally includes unredacted referents, /// Since the provided structure intentionally includes unredacted referents,
/// some post-processing is done to ensure they don't show up in the model. /// some post-processing is done to ensure they don't show up in the model.
pub fn serialize_to_xml_model(response: &SerializeResponse, redactions: &RedactionMap) -> String { pub fn serialize_to_xml_model(response: &SerializeResponse, redactions: &RedactionMap) -> String {
let model_content = data_encoding::BASE64 let mut dom = rbx_binary::from_reader(response.model_contents.as_slice()).unwrap();
.decode(response.model_contents.model().as_bytes())
.unwrap();
let mut dom = rbx_binary::from_reader(model_content.as_slice()).unwrap();
// This makes me realize that maybe we need a `descendants_mut` iter. // This makes me realize that maybe we need a `descendants_mut` iter.
let ref_list: Vec<Ref> = dom.descendants().map(|inst| inst.referent()).collect(); let ref_list: Vec<Ref> = dom.descendants().map(|inst| inst.referent()).collect();
for referent in ref_list { for referent in ref_list {

View File

@@ -41,7 +41,6 @@ gen_build_tests! {
issue_546, issue_546,
json_as_lua, json_as_lua,
json_model_in_folder, json_model_in_folder,
json_model_legacy_name,
module_in_folder, module_in_folder,
module_init, module_init,
nested_runcontext, nested_runcontext,
@@ -55,6 +54,8 @@ gen_build_tests! {
script_meta_disabled, script_meta_disabled,
server_in_folder, server_in_folder,
server_init, server_init,
slugified_name_roundtrip,
model_json_name_input,
txt, txt,
txt_in_folder, txt_in_folder,
unresolved_values, unresolved_values,

View File

@@ -646,7 +646,7 @@ fn meshpart_with_id() {
.unwrap(); .unwrap();
let serialize_response = session let serialize_response = session
.get_api_serialize(&[*meshpart, *objectvalue]) .get_api_serialize(&[*meshpart, *objectvalue], info.session_id)
.unwrap(); .unwrap();
// We don't assert a snapshot on the SerializeResponse because the model includes the // We don't assert a snapshot on the SerializeResponse because the model includes the
@@ -673,7 +673,9 @@ fn forced_parent() {
read_response.intern_and_redact(&mut redactions, root_id) read_response.intern_and_redact(&mut redactions, root_id)
); );
let serialize_response = session.get_api_serialize(&[root_id]).unwrap(); let serialize_response = session
.get_api_serialize(&[root_id], info.session_id)
.unwrap();
assert_eq!(serialize_response.session_id, info.session_id); assert_eq!(serialize_response.session_id, info.session_id);

View File

@@ -86,4 +86,9 @@ syncback_tests! {
sync_rules => ["src/module.modulescript", "src/text.text"], sync_rules => ["src/module.modulescript", "src/text.text"],
// Ensures that the `syncUnscriptable` setting works // Ensures that the `syncUnscriptable` setting works
unscriptable_properties => ["default.project.json"], unscriptable_properties => ["default.project.json"],
// Ensures that instances with names containing illegal characters get slugified filenames
// and preserve their original names in meta.json without forcing directories for leaf scripts
slugified_name => ["src/_Script.meta.json", "src/_Script.server.luau", "src/_Folder/init.meta.json"],
// Ensures that .model.json files preserve the name property
model_json_name => ["src/foo.model.json"],
} }