Major Subsystem Rewrite (Reconciler Mk5) (#217)

This commit is contained in:
Lucien Greathouse
2019-08-27 15:00:37 -07:00
committed by GitHub
parent 8e8291a0bd
commit fea303ac8b
80 changed files with 3843 additions and 5609 deletions

17
Cargo.lock generated
View File

@@ -277,6 +277,14 @@ dependencies = [
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "crossbeam-channel"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "crossbeam-deque" name = "crossbeam-deque"
version = "0.7.1" version = "0.7.1"
@@ -701,6 +709,11 @@ name = "itoa"
version = "0.4.4" version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "jod-thread"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "kernel32-sys" name = "kernel32-sys"
version = "0.2.2" version = "0.2.2"
@@ -1501,11 +1514,13 @@ name = "rojo"
version = "0.5.0" version = "0.5.0"
dependencies = [ dependencies = [
"clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.33 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.12.33 (registry+https://github.com/rust-lang/crates.io-index)",
"jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2175,6 +2190,7 @@ dependencies = [
"checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d"
"checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b"
"checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
"checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa"
"checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71" "checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71"
"checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9" "checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9"
"checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" "checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b"
@@ -2223,6 +2239,7 @@ dependencies = [
"checksum insta 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "00eef45accbe65bfb859ad16649c6b4bed246768d89493473d9ab6c6a0eb908f" "checksum insta 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "00eef45accbe65bfb859ad16649c6b4bed246768d89493473d9ab6c6a0eb908f"
"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
"checksum jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f52a11f73b88fab829a0e4d9e13ea5982c7ac457c72eb3541d82a4afdfce4ff"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" "checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"

30
design.gv Normal file
View File

@@ -0,0 +1,30 @@
digraph Rojo {
concentrate = true;
node [fontname = "sans-serif"];
plugin [label="Roblox Studio Plugin"]
session [label="Session"]
rbx_tree [label="Instance Tree"]
imfs [label="In-Memory Filesystem"]
fs_impl [label="Filesystem Implementation\n(stubbed in tests)"]
fs [label="Real Filesystem"]
snapshot_subsystem [label="Snapshot Subsystem\n(reconciler)"]
snapshot_generator [label="Snapshot Generator"]
user_middleware [label="User Middleware\n(MoonScript, etc.)"]
builtin_middleware [label="Built-in Middleware\n(.lua, .rbxm, etc.)"]
api [label="Web API"]
file_watcher [label="File Watcher"]
session -> imfs
session -> rbx_tree
session -> snapshot_subsystem
session -> snapshot_generator
session -> file_watcher [dir="both"]
file_watcher -> imfs
snapshot_generator -> user_middleware
snapshot_generator -> builtin_middleware
plugin -> api [style="dotted"; dir="both"; minlen=2]
api -> session
imfs -> fs_impl
fs_impl -> fs
}

View File

@@ -0,0 +1 @@
This is a bare text file with no project.

View File

@@ -0,0 +1,25 @@
<roblox xmlns:xmime="http://www.w3.org/2005/05/xmlmime" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.roblox.com/roblox.xsd" version="4">
<Meta name="ExplicitAutoJoints">true</Meta>
<External>null</External>
<External>nil</External>
<Item class="Folder" referent="RBX82C2B70FBD6642A894EFCBA4FF284ADD">
<Properties>
<string name="Name">Container</string>
<BinaryString name="Tags"></BinaryString>
</Properties>
<Item class="StringValue" referent="RBX42D96C32E905427DBA530A1881C962FD">
<Properties>
<string name="Name">Target</string>
<BinaryString name="Tags"></BinaryString>
<string name="Value">Pointed to by ObjectValue</string>
</Properties>
</Item>
<Item class="ObjectValue" referent="RBXB5B6B1FDCC4942FA823B06B7ABA0EEDB">
<Properties>
<string name="Name">Pointer</string>
<BinaryString name="Tags"></BinaryString>
<Ref name="Value">RBX42D96C32E905427DBA530A1881C962FD</Ref>
</Properties>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,6 @@
{
"name": "txt",
"tree": {
"$path": "foo.txt"
}
}

View File

@@ -0,0 +1 @@
This is a txt file in a project.

View File

@@ -31,13 +31,25 @@ gen_build_tests! {
json_model_legacy_name, json_model_legacy_name,
module_in_folder, module_in_folder,
module_init, module_init,
plain_gitkeep,
rbxm_in_folder, rbxm_in_folder,
rbxmx_in_folder, rbxmx_in_folder,
server_in_folder, server_in_folder,
server_init, server_init,
txt,
txt_in_folder, txt_in_folder,
} }
#[test]
fn build_plain_txt() {
run_build_test("plain.txt");
}
#[test]
fn build_rbxmx_ref() {
run_build_test("rbxmx_ref.rbxmx");
}
fn run_build_test(test_name: &str) { fn run_build_test(test_name: &str) {
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let build_test_path = manifest_dir.join("build-tests"); let build_test_path = manifest_dir.join("build-tests");

View File

@@ -0,0 +1,14 @@
---
created: "2019-08-09T00:17:42.253380600Z"
creator: insta@0.10.0
source: rojo-test/src/build_test.rs
expression: contents
---
<roblox version="4">
<Item class="StringValue" referent="0">
<Properties>
<string name="Name">plain</string>
<string name="Value">This is a bare text file with no project.</string>
</Properties>
</Item>
</roblox>

View File

@@ -0,0 +1,13 @@
---
created: "2019-08-09T00:17:42.175575800Z"
creator: insta@0.10.0
source: rojo-test/src/build_test.rs
expression: contents
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">plain_gitkeep</string>
</Properties>
</Item>
</roblox>

View File

@@ -0,0 +1,28 @@
---
created: "2019-08-10T07:57:42.835269100Z"
creator: insta@0.10.0
source: rojo-test/src/build_test.rs
expression: contents
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">rbxmx_ref</string>
<BinaryString name="Tags"><![CDATA[]]></BinaryString>
</Properties>
<Item class="StringValue" referent="1">
<Properties>
<string name="Name">Target</string>
<BinaryString name="Tags"><![CDATA[]]></BinaryString>
<string name="Value">Pointed to by ObjectValue</string>
</Properties>
</Item>
<Item class="ObjectValue" referent="2">
<Properties>
<string name="Name">Pointer</string>
<BinaryString name="Tags"><![CDATA[]]></BinaryString>
<Ref name="Value">1</Ref>
</Properties>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,14 @@
---
created: "2019-08-09T00:22:01.983322Z"
creator: insta@0.10.0
source: rojo-test/src/build_test.rs
expression: contents
---
<roblox version="4">
<Item class="StringValue" referent="0">
<Properties>
<string name="Name">txt</string>
<string name="Value">This is a txt file in a project.</string>
</Properties>
</Item>
</roblox>

View File

@@ -21,29 +21,31 @@ path = "src/bin.rs"
[dependencies] [dependencies]
clap = "2.27" clap = "2.27"
crossbeam-channel = "0.3.9"
csv = "1.0" csv = "1.0"
env_logger = "0.6" env_logger = "0.6"
failure = "0.1.3" failure = "0.1.3"
futures = "0.1" futures = "0.1"
hyper = "0.12" hyper = "0.12"
jod-thread = "0.1.0"
log = "0.4" log = "0.4"
maplit = "1.0.1" maplit = "1.0.1"
notify = "4.0" notify = "4.0"
rbx_binary = "0.4.1" rbx_binary = "0.4.1"
rbx_dom_weak = "1.9.0" rbx_dom_weak = "1.9.0"
rbx_xml = "0.11.0"
rbx_reflection = "3.1.388" rbx_reflection = "3.1.388"
rbx_xml = "0.11.0"
regex = "1.0" regex = "1.0"
reqwest = "0.9.5" reqwest = "0.9.5"
rlua = "0.16"
ritz = "0.1.0" ritz = "0.1.0"
rlua = "0.16"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
uuid = { version = "0.7", features = ["v4", "serde"] } uuid = { version = "0.7", features = ["v4", "serde"] }
[dev-dependencies] [dev-dependencies]
tempfile = "3.0"
walkdir = "2.1"
lazy_static = "1.2" lazy_static = "1.2"
paste = "0.1"
pretty_assertions = "0.6.1" pretty_assertions = "0.6.1"
paste = "0.1" tempfile = "3.0"
walkdir = "2.1"

Binary file not shown.

View File

@@ -1,17 +1,17 @@
use std::{ use std::{
path::PathBuf, collections::HashMap,
fs::File, fs::File,
io::{self, Write, BufWriter}, io::{self, Write, BufWriter},
path::PathBuf,
}; };
use log::info; use rbx_dom_weak::{RbxTree, RbxInstanceProperties};
use failure::Fail; use failure::Fail;
use crate::{ use crate::{
imfs::{Imfs, FsError}, imfs::new::{Imfs, RealFetcher, WatchMode, FsError},
project::{Project, ProjectLoadError}, snapshot::{apply_patch_set, compute_patch_set},
rbx_session::construct_oneoff_tree, snapshot_middleware::snapshot_from_imfs,
rbx_snapshot::SnapshotError,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -46,9 +46,6 @@ pub enum BuildError {
#[fail(display = "Could not detect what kind of file to create")] #[fail(display = "Could not detect what kind of file to create")]
UnknownOutputKind, UnknownOutputKind,
#[fail(display = "Project load error: {}", _0)]
ProjectLoadError(#[fail(cause)] ProjectLoadError),
#[fail(display = "IO error: {}", _0)] #[fail(display = "IO error: {}", _0)]
IoError(#[fail(cause)] io::Error), IoError(#[fail(cause)] io::Error),
@@ -60,18 +57,13 @@ pub enum BuildError {
#[fail(display = "{}", _0)] #[fail(display = "{}", _0)]
FsError(#[fail(cause)] FsError), FsError(#[fail(cause)] FsError),
#[fail(display = "{}", _0)]
SnapshotError(#[fail(cause)] SnapshotError),
} }
impl_from!(BuildError { impl_from!(BuildError {
ProjectLoadError => ProjectLoadError,
io::Error => IoError, io::Error => IoError,
rbx_xml::EncodeError => XmlModelEncodeError, rbx_xml::EncodeError => XmlModelEncodeError,
rbx_binary::EncodeError => BinaryModelEncodeError, rbx_binary::EncodeError => BinaryModelEncodeError,
FsError => FsError, FsError => FsError,
SnapshotError => SnapshotError,
}); });
fn xml_encode_config() -> rbx_xml::EncodeOptions { fn xml_encode_config() -> rbx_xml::EncodeOptions {
@@ -84,18 +76,34 @@ pub fn build(options: &BuildOptions) -> Result<(), BuildError> {
.or_else(|| detect_output_kind(options)) .or_else(|| detect_output_kind(options))
.ok_or(BuildError::UnknownOutputKind)?; .ok_or(BuildError::UnknownOutputKind)?;
info!("Hoping to generate file of type {:?}", output_kind); log::info!("Hoping to generate file of type {:?}", output_kind);
info!("Looking for project at {}", options.fuzzy_project_path.display()); let mut tree = RbxTree::new(RbxInstanceProperties {
name: "ROOT".to_owned(),
class_name: "Folder".to_owned(),
properties: HashMap::new(),
});
let root_id = tree.get_root_id();
let project = Project::load_fuzzy(&options.fuzzy_project_path)?; log::trace!("Constructing in-memory filesystem");
let mut imfs = Imfs::new(RealFetcher::new(WatchMode::Disabled));
info!("Found project at {}", project.file_location.display()); log::trace!("Reading project root");
info!("Using project {:#?}", project); let entry = imfs.get(&options.fuzzy_project_path)
.expect("could not get project path");
let mut imfs = Imfs::new(); log::trace!("Generating snapshot of instances from IMFS");
imfs.add_roots_from_project(&project)?; let snapshot = snapshot_from_imfs(&mut imfs, &entry)
let tree = construct_oneoff_tree(&project, &imfs)?; .expect("snapshot failed")
.expect("snapshot did not return an instance");
log::trace!("Computing patch set");
let patch_set = compute_patch_set(&snapshot, &tree, root_id);
log::trace!("Applying patch set");
apply_patch_set(&mut tree, &patch_set);
log::trace!("Opening output file for write");
let mut file = BufWriter::new(File::create(&options.output_file)?); let mut file = BufWriter::new(File::create(&options.output_file)?);
match output_kind { match output_kind {
@@ -103,19 +111,16 @@ pub fn build(options: &BuildOptions) -> Result<(), BuildError> {
// Model files include the root instance of the tree and all its // Model files include the root instance of the tree and all its
// descendants. // descendants.
let root_id = tree.get_root_id();
rbx_xml::to_writer(&mut file, &tree, &[root_id], xml_encode_config())?; rbx_xml::to_writer(&mut file, &tree, &[root_id], xml_encode_config())?;
}, },
OutputKind::Rbxlx => { OutputKind::Rbxlx => {
// Place files don't contain an entry for the DataModel, but our // Place files don't contain an entry for the DataModel, but our
// RbxTree representation does. // RbxTree representation does.
let root_id = tree.get_root_id();
let top_level_ids = tree.get_instance(root_id).unwrap().get_children_ids(); let top_level_ids = tree.get_instance(root_id).unwrap().get_children_ids();
rbx_xml::to_writer(&mut file, &tree, top_level_ids, xml_encode_config())?; rbx_xml::to_writer(&mut file, &tree, top_level_ids, xml_encode_config())?;
}, },
OutputKind::Rbxm => { OutputKind::Rbxm => {
let root_id = tree.get_root_id();
rbx_binary::encode(&tree, &[root_id], &mut file)?; rbx_binary::encode(&tree, &[root_id], &mut file)?;
}, },
OutputKind::Rbxl => { OutputKind::Rbxl => {
@@ -123,7 +128,6 @@ pub fn build(options: &BuildOptions) -> Result<(), BuildError> {
log::warn!("Using the XML place format (rbxlx) is recommended instead."); log::warn!("Using the XML place format (rbxlx) is recommended instead.");
log::warn!("For more info, see https://github.com/LPGhatguy/rojo/issues/180"); log::warn!("For more info, see https://github.com/LPGhatguy/rojo/issues/180");
let root_id = tree.get_root_id();
let top_level_ids = tree.get_instance(root_id).unwrap().get_children_ids(); let top_level_ids = tree.get_instance(root_id).unwrap().get_children_ids();
rbx_binary::encode(&tree, top_level_ids, &mut file)?; rbx_binary::encode(&tree, top_level_ids, &mut file)?;
}, },
@@ -131,5 +135,7 @@ pub fn build(options: &BuildOptions) -> Result<(), BuildError> {
file.flush()?; file.flush()?;
log::trace!("Done!");
Ok(()) Ok(())
} }

View File

@@ -1,16 +1,19 @@
use std::{ use std::{
collections::HashMap,
path::PathBuf, path::PathBuf,
sync::Arc, sync::Arc,
}; };
use log::info; use rbx_dom_weak::{RbxTree, RbxInstanceProperties};
use failure::Fail; use failure::Fail;
use crate::{ use crate::{
imfs::new::{Imfs, RealFetcher, WatchMode},
project::{Project, ProjectLoadError}, project::{Project, ProjectLoadError},
serve_session::ServeSession,
snapshot::{apply_patch_set, compute_patch_set},
snapshot_middleware::snapshot_from_imfs,
web::LiveServer, web::LiveServer,
imfs::FsError,
live_session::{LiveSession, LiveSessionError},
}; };
const DEFAULT_PORT: u16 = 34872; const DEFAULT_PORT: u16 = 34872;
@@ -23,40 +26,64 @@ pub struct ServeOptions {
#[derive(Debug, Fail)] #[derive(Debug, Fail)]
pub enum ServeError { pub enum ServeError {
#[fail(display = "Project load error: {}", _0)] #[fail(display = "Couldn't load project: {}", _0)]
ProjectLoadError(#[fail(cause)] ProjectLoadError), ProjectLoad(#[fail(cause)] ProjectLoadError),
#[fail(display = "{}", _0)]
FsError(#[fail(cause)] FsError),
#[fail(display = "{}", _0)]
LiveSessionError(#[fail(cause)] LiveSessionError),
} }
impl_from!(ServeError { impl_from!(ServeError {
ProjectLoadError => ProjectLoadError, ProjectLoadError => ProjectLoad,
FsError => FsError,
LiveSessionError => LiveSessionError,
}); });
pub fn serve(options: &ServeOptions) -> Result<(), ServeError> { pub fn serve(options: &ServeOptions) -> Result<(), ServeError> {
info!("Looking for project at {}", options.fuzzy_project_path.display()); let maybe_project = match Project::load_fuzzy(&options.fuzzy_project_path) {
Ok(project) => Some(project),
let project = Arc::new(Project::load_fuzzy(&options.fuzzy_project_path)?); Err(ProjectLoadError::NotFound) => None,
Err(other) => return Err(other.into()),
info!("Found project at {}", project.file_location.display()); };
info!("Using project {:#?}", project);
let live_session = Arc::new(LiveSession::new(Arc::clone(&project))?);
let server = LiveServer::new(live_session);
let port = options.port let port = options.port
.or(project.serve_port) .or(maybe_project.as_ref().and_then(|project| project.serve_port))
.unwrap_or(DEFAULT_PORT); .unwrap_or(DEFAULT_PORT);
println!("Rojo server listening on port {}", port); println!("Rojo server listening on port {}", port);
let mut tree = RbxTree::new(RbxInstanceProperties {
name: "ROOT".to_owned(),
class_name: "Folder".to_owned(),
properties: HashMap::new(),
});
let root_id = tree.get_root_id();
let mut imfs = Imfs::new(RealFetcher::new(WatchMode::Enabled));
let entry = imfs.get(&options.fuzzy_project_path)
.expect("could not get project path");
let snapshot = snapshot_from_imfs(&mut imfs, &entry)
.expect("snapshot failed")
.expect("snapshot did not return an instance");
let patch_set = compute_patch_set(&snapshot, &tree, root_id);
apply_patch_set(&mut tree, &patch_set);
let session = Arc::new(ServeSession::new(maybe_project));
let server = LiveServer::new(session);
server.start(port); server.start(port);
// let receiver = imfs.change_receiver();
// while let Ok(change) = receiver.recv() {
// imfs.commit_change(&change)
// .expect("Failed to commit Imfs change");
// use notify::DebouncedEvent;
// if let DebouncedEvent::Write(path) = change {
// let contents = imfs.get_contents(path)
// .expect("Failed to read changed path");
// println!("{:?}", std::str::from_utf8(contents));
// }
// }
Ok(()) Ok(())
} }

View File

@@ -1,56 +1,13 @@
use std::{ use std::path::PathBuf;
path::PathBuf,
io,
};
use log::info;
use failure::Fail; use failure::Fail;
use reqwest::header::{ACCEPT, USER_AGENT, CONTENT_TYPE, COOKIE};
use crate::{
imfs::{Imfs, FsError},
project::{Project, ProjectLoadError},
rbx_session::construct_oneoff_tree,
rbx_snapshot::SnapshotError,
};
#[derive(Debug, Fail)] #[derive(Debug, Fail)]
pub enum UploadError { pub enum UploadError {
#[fail(display = "Roblox API Error: {}", _0)] #[fail(display = "This error cannot happen")]
RobloxApiError(String), StubError,
#[fail(display = "Invalid asset kind: {}", _0)]
InvalidKind(String),
#[fail(display = "Project load error: {}", _0)]
ProjectLoadError(#[fail(cause)] ProjectLoadError),
#[fail(display = "IO error: {}", _0)]
IoError(#[fail(cause)] io::Error),
#[fail(display = "HTTP error: {}", _0)]
HttpError(#[fail(cause)] reqwest::Error),
#[fail(display = "XML model file error")]
XmlModelEncodeError(rbx_xml::EncodeError),
#[fail(display = "{}", _0)]
FsError(#[fail(cause)] FsError),
#[fail(display = "{}", _0)]
SnapshotError(#[fail(cause)] SnapshotError),
} }
impl_from!(UploadError {
ProjectLoadError => ProjectLoadError,
io::Error => IoError,
reqwest::Error => HttpError,
rbx_xml::EncodeError => XmlModelEncodeError,
FsError => FsError,
SnapshotError => SnapshotError,
});
#[derive(Debug)] #[derive(Debug)]
pub struct UploadOptions<'a> { pub struct UploadOptions<'a> {
pub fuzzy_project_path: PathBuf, pub fuzzy_project_path: PathBuf,
@@ -59,49 +16,6 @@ pub struct UploadOptions<'a> {
pub kind: Option<&'a str>, pub kind: Option<&'a str>,
} }
pub fn upload(options: &UploadOptions) -> Result<(), UploadError> { pub fn upload(_options: &UploadOptions) -> Result<(), UploadError> {
// TODO: Switch to uploading binary format? unimplemented!("TODO: Reimplement upload command");
info!("Looking for project at {}", options.fuzzy_project_path.display());
let project = Project::load_fuzzy(&options.fuzzy_project_path)?;
info!("Found project at {}", project.file_location.display());
info!("Using project {:#?}", project);
let mut imfs = Imfs::new();
imfs.add_roots_from_project(&project)?;
let tree = construct_oneoff_tree(&project, &imfs)?;
let root_id = tree.get_root_id();
let mut contents = Vec::new();
match options.kind {
Some("place") | None => {
let top_level_ids = tree.get_instance(root_id).unwrap().get_children_ids();
rbx_xml::to_writer_default(&mut contents, &tree, top_level_ids)?;
},
Some("model") => {
rbx_xml::to_writer_default(&mut contents, &tree, &[root_id])?;
},
Some(invalid) => return Err(UploadError::InvalidKind(invalid.to_owned())),
}
let url = format!("https://data.roblox.com/Data/Upload.ashx?assetid={}", options.asset_id);
let client = reqwest::Client::new();
let mut response = client.post(&url)
.header(COOKIE, format!(".ROBLOSECURITY={}", &options.security_cookie))
.header(USER_AGENT, "Roblox/WinInet")
.header("Requester", "Client")
.header(CONTENT_TYPE, "application/xml")
.header(ACCEPT, "application/json")
.body(contents)
.send()?;
if !response.status().is_success() {
return Err(UploadError::RobloxApiError(response.text()?));
}
Ok(())
} }

View File

@@ -1,143 +0,0 @@
use std::{
sync::{mpsc, Arc, Mutex},
time::Duration,
path::Path,
ops::Deref,
thread,
};
use log::{warn, trace};
use notify::{
self,
DebouncedEvent,
RecommendedWatcher,
RecursiveMode,
Watcher,
};
use crate::{
imfs::Imfs,
rbx_session::RbxSession,
};
const WATCH_TIMEOUT: Duration = Duration::from_millis(100);
/// Watches for changes on the filesystem and links together the in-memory
/// filesystem and in-memory Roblox tree.
pub struct FsWatcher {
watcher: RecommendedWatcher,
}
impl FsWatcher {
/// Start a new FS watcher, watching all of the roots currently attached to
/// the given Imfs.
///
/// `rbx_session` is optional to make testing easier. If it isn't `None`,
/// events will be passed to it after they're given to the Imfs.
pub fn start(imfs: Arc<Mutex<Imfs>>, rbx_session: Option<Arc<Mutex<RbxSession>>>) -> FsWatcher {
let (watch_tx, watch_rx) = mpsc::channel();
let mut watcher = notify::watcher(watch_tx, WATCH_TIMEOUT)
.expect("Could not create filesystem watcher");
{
let imfs = imfs.lock().unwrap();
for root_path in imfs.get_roots() {
trace!("Watching path {}", root_path.display());
watcher.watch(root_path, RecursiveMode::Recursive)
.expect("Could not watch directory");
}
}
{
let imfs = Arc::clone(&imfs);
let rbx_session = rbx_session.as_ref().map(Arc::clone);
thread::spawn(move || {
trace!("Watcher thread started");
while let Ok(event) = watch_rx.recv() {
// handle_fs_event expects an Option<&Mutex<T>>, but we have
// an Option<Arc<Mutex<T>>>, so we coerce with Deref.
let session_ref = rbx_session.as_ref().map(Deref::deref);
handle_fs_event(&imfs, session_ref, event);
}
trace!("Watcher thread stopped");
});
}
FsWatcher {
watcher,
}
}
pub fn stop_watching_path(&mut self, path: &Path) {
match self.watcher.unwatch(path) {
Ok(_) => {},
Err(e) => {
warn!("Could not unwatch path {}: {}", path.display(), e);
},
}
}
}
fn handle_fs_event(imfs: &Mutex<Imfs>, rbx_session: Option<&Mutex<RbxSession>>, event: DebouncedEvent) {
match event {
DebouncedEvent::Create(path) => {
trace!("Path created: {}", path.display());
{
let mut imfs = imfs.lock().unwrap();
imfs.path_created(&path).unwrap();
}
if let Some(rbx_session) = rbx_session {
let mut rbx_session = rbx_session.lock().unwrap();
rbx_session.path_created(&path);
}
},
DebouncedEvent::Write(path) => {
trace!("Path created: {}", path.display());
{
let mut imfs = imfs.lock().unwrap();
imfs.path_updated(&path).unwrap();
}
if let Some(rbx_session) = rbx_session {
let mut rbx_session = rbx_session.lock().unwrap();
rbx_session.path_updated(&path);
}
},
DebouncedEvent::Remove(path) => {
trace!("Path removed: {}", path.display());
{
let mut imfs = imfs.lock().unwrap();
imfs.path_removed(&path).unwrap();
}
if let Some(rbx_session) = rbx_session {
let mut rbx_session = rbx_session.lock().unwrap();
rbx_session.path_removed(&path);
}
},
DebouncedEvent::Rename(from_path, to_path) => {
trace!("Path renamed: {} to {}", from_path.display(), to_path.display());
{
let mut imfs = imfs.lock().unwrap();
imfs.path_moved(&from_path, &to_path).unwrap();
}
if let Some(rbx_session) = rbx_session {
let mut rbx_session = rbx_session.lock().unwrap();
rbx_session.path_renamed(&from_path, &to_path);
}
},
other => {
trace!("Unhandled FS event: {:?}", other);
},
}
}

View File

@@ -1,331 +0,0 @@
use std::{
cmp::Ordering,
collections::{HashMap, HashSet, BTreeSet},
fmt,
fs,
io,
path::{self, Path, PathBuf},
};
use failure::Fail;
use serde::{Serialize, Deserialize};
use crate::project::{Project, ProjectNode};
/// A wrapper around io::Error that also attaches the path associated with the
/// error.
#[derive(Debug, Fail)]
pub struct FsError {
#[fail(cause)]
inner: io::Error,
path: PathBuf,
}
impl FsError {
fn new<P: Into<PathBuf>>(inner: io::Error, path: P) -> FsError {
FsError {
inner,
path: path.into(),
}
}
}
impl fmt::Display for FsError {
fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
write!(output, "{}: {}", self.path.display(), self.inner)
}
}
fn add_sync_points(imfs: &mut Imfs, node: &ProjectNode) -> Result<(), FsError> {
if let Some(path) = &node.path {
imfs.add_root(path)?;
}
for child in node.children.values() {
add_sync_points(imfs, child)?;
}
Ok(())
}
/// The in-memory filesystem keeps a mirror of all files being watched by Rojo
/// in order to deduplicate file changes in the case of bidirectional syncing
/// from Roblox Studio.
///
/// It also enables Rojo to quickly generate React-like snapshots to make
/// reasoning about instances and how they relate to files easier.
#[derive(Debug, Clone)]
pub struct Imfs {
items: HashMap<PathBuf, ImfsItem>,
roots: HashSet<PathBuf>,
}
impl Imfs {
pub fn new() -> Imfs {
Imfs {
items: HashMap::new(),
roots: HashSet::new(),
}
}
pub fn add_roots_from_project(&mut self, project: &Project) -> Result<(), FsError> {
add_sync_points(self, &project.tree)
}
pub fn get_roots(&self) -> &HashSet<PathBuf> {
&self.roots
}
pub fn get_items(&self) -> &HashMap<PathBuf, ImfsItem> {
&self.items
}
pub fn get(&self, path: &Path) -> Option<&ImfsItem> {
debug_assert!(path.is_absolute());
debug_assert!(self.is_within_roots(path));
self.items.get(path)
}
pub fn add_root(&mut self, path: &Path) -> Result<(), FsError> {
debug_assert!(path.is_absolute());
if !self.is_within_roots(path) {
self.roots.insert(path.to_path_buf());
self.descend_and_read_from_disk(path)?;
}
Ok(())
}
pub fn remove_root(&mut self, path: &Path) {
debug_assert!(path.is_absolute());
if self.roots.get(path).is_some() {
self.remove_item(path);
if let Some(parent_path) = path.parent() {
self.unlink_child(parent_path, path);
}
}
}
pub fn path_created(&mut self, path: &Path) -> Result<(), FsError> {
debug_assert!(path.is_absolute());
debug_assert!(self.is_within_roots(path));
self.descend_and_read_from_disk(path)
}
pub fn path_updated(&mut self, path: &Path) -> Result<(), FsError> {
debug_assert!(path.is_absolute());
debug_assert!(self.is_within_roots(path));
self.descend_and_read_from_disk(path)
}
pub fn path_removed(&mut self, path: &Path) -> Result<(), FsError> {
debug_assert!(path.is_absolute());
debug_assert!(self.is_within_roots(path));
self.remove_item(path);
if let Some(parent_path) = path.parent() {
self.unlink_child(parent_path, path);
}
Ok(())
}
pub fn path_moved(&mut self, from_path: &Path, to_path: &Path) -> Result<(), FsError> {
self.path_removed(from_path)?;
self.path_created(to_path)?;
Ok(())
}
pub fn get_root_for_path<'a>(&'a self, path: &Path) -> Option<&'a Path> {
for root_path in &self.roots {
if path.starts_with(root_path) {
return Some(root_path);
}
}
None
}
fn remove_item(&mut self, path: &Path) {
if let Some(ImfsItem::Directory(directory)) = self.items.remove(path) {
for child_path in &directory.children {
self.remove_item(child_path);
}
}
}
/// Detaches `child` from `parent`'s child set. A parent that is absent or
/// isn't a directory is silently ignored.
fn unlink_child(&mut self, parent: &Path, child: &Path) {
    if let Some(ImfsItem::Directory(directory)) = self.items.get_mut(parent) {
        directory.children.remove(child);
    }
}
/// Records `child` in `parent`'s child set. Parents outside the registered
/// roots are ignored; a resident parent that isn't a directory (or is
/// missing) is a logic error and panics.
fn link_child(&mut self, parent: &Path, child: &Path) {
    if !self.is_within_roots(parent) {
        return;
    }

    match self.items.get_mut(parent) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.insert(child.to_path_buf());
        }
        _ => panic!("Tried to link child of path that wasn't a directory!"),
    }
}
/// Reads `path` into the Imfs by walking down from its containing root,
/// starting the actual disk read at the deepest ancestor that is already
/// present in the item map (so no intermediate directories are skipped).
///
/// Fix: the panic message previously read "descent and read".
fn descend_and_read_from_disk(&mut self, path: &Path) -> Result<(), FsError> {
    let root_path = self.get_root_path(path)
        .expect("Tried to descend and read for path that wasn't within roots!");

    // If this path is a root, we should read the entire thing.
    if root_path == path {
        self.read_from_disk(path)?;
        return Ok(());
    }

    let relative_path = path.strip_prefix(root_path).unwrap();
    let mut current_path = root_path.to_path_buf();

    // Advance component-by-component while the path is already known;
    // the first unknown component is where reading must begin.
    for component in relative_path.components() {
        match component {
            path::Component::Normal(name) => {
                let next_path = current_path.join(name);

                if self.items.contains_key(&next_path) {
                    current_path = next_path;
                } else {
                    break;
                }
            },
            // strip_prefix of one absolute path from another yields only
            // Normal components, so anything else is a logic error.
            _ => unreachable!(),
        }
    }

    self.read_from_disk(&current_path)
}
/// Eagerly reads `path` — and, for directories, every descendant — from
/// the real filesystem into the item map, linking each item to its parent
/// as it goes.
fn read_from_disk(&mut self, path: &Path) -> Result<(), FsError> {
// fs::metadata follows symlinks, so the result describes the target.
let metadata = fs::metadata(path)
.map_err(|e| FsError::new(e, path))?;
if metadata.is_file() {
let contents = fs::read(path)
.map_err(|e| FsError::new(e, path))?;
let item = ImfsItem::File(ImfsFile {
path: path.to_path_buf(),
contents,
});
self.items.insert(path.to_path_buf(), item);
// Attach to the parent's child set (no-op if parent is outside roots).
if let Some(parent_path) = path.parent() {
self.link_child(parent_path, path);
}
Ok(())
} else if metadata.is_dir() {
// Insert the directory node first so recursive children can link to it.
let item = ImfsItem::Directory(ImfsDirectory {
path: path.to_path_buf(),
children: BTreeSet::new(),
});
self.items.insert(path.to_path_buf(), item);
let dir_children = fs::read_dir(path)
.map_err(|e| FsError::new(e, path))?;
// Recurse into every entry; each child links itself back to us.
for entry in dir_children {
let entry = entry
.map_err(|e| FsError::new(e, path))?;
let child_path = entry.path();
self.read_from_disk(&child_path)?;
}
if let Some(parent_path) = path.parent() {
self.link_child(parent_path, path);
}
Ok(())
} else {
// NOTE(review): metadata() follows links, so this branch should be
// effectively unreachable on common platforms — confirm for exotic
// file kinds before relying on the panic.
panic!("Unexpected non-file, non-directory item");
}
}
fn get_root_path<'a>(&'a self, path: &Path) -> Option<&'a Path> {
for root_path in &self.roots {
if path.starts_with(root_path) {
return Some(root_path)
}
}
None
}
/// Whether `path` falls under any registered root.
fn is_within_roots(&self, path: &Path) -> bool {
    self.roots.iter().any(|root| path.starts_with(root))
}
}
/// An in-memory copy of a file: its absolute path plus its full contents.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ImfsFile {
pub path: PathBuf,
pub contents: Vec<u8>,
}
// Delegates to `Ord` below so the two orderings can never disagree.
impl PartialOrd for ImfsFile {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
// Files are ordered by path alone; contents do not participate.
impl Ord for ImfsFile {
fn cmp(&self, other: &Self) -> Ordering {
self.path.cmp(&other.path)
}
}
/// An in-memory directory: its absolute path plus the sorted set of child
/// paths currently linked to it.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ImfsDirectory {
pub path: PathBuf,
pub children: BTreeSet<PathBuf>,
}
// Delegates to `Ord` below so the two orderings can never disagree.
impl PartialOrd for ImfsDirectory {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
// Directories are ordered by path alone; children do not participate.
impl Ord for ImfsDirectory {
fn cmp(&self, other: &Self) -> Ordering {
self.path.cmp(&other.path)
}
}
/// A single entry in the Imfs: either a fully-read file or a directory.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub enum ImfsItem {
File(ImfsFile),
Directory(ImfsDirectory),
}

65
server/src/imfs/error.rs Normal file
View File

@@ -0,0 +1,65 @@
use std::{
io,
fmt,
path::PathBuf,
};
use failure::Fail;
/// Convenience alias used throughout the Imfs for fallible operations.
pub type FsResult<T> = Result<T, FsError>;
// Re-export io::ErrorKind under a name matching FsError.
pub use io::ErrorKind as FsErrorKind;
/// Extension trait for collapsing "not found" errors into `Ok(None)`.
pub trait FsResultExt<T> {
fn with_not_found(self) -> Result<Option<T>, FsError>;
}
impl<T> FsResultExt<T> for Result<T, FsError> {
    /// Maps a NotFound error to `Ok(None)`; any other error passes through
    /// and a success becomes `Ok(Some(value))`.
    fn with_not_found(self) -> Result<Option<T>, FsError> {
        match self {
            Ok(value) => Ok(Some(value)),
            Err(err) => {
                if err.kind() == FsErrorKind::NotFound {
                    Ok(None)
                } else {
                    Err(err)
                }
            }
        }
    }
}
// TODO: New error type that contains errors specific to our application,
// wrapping io::Error either directly or through another error type that has
// path information.
//
// It's possible that we should hoist up the path information one more level, or
// destructure/restructure information to hoist the path out of FsError and just
// embed io::Error?
/// Planned richer error type for Imfs operations.
/// NOTE(review): not constructed anywhere in this module — looks like
/// work-in-progress scaffolding for the TODO above; confirm before use.
pub enum ImfsError {
NotFound,
WrongKind,
Io(io::Error),
}
/// A wrapper around io::Error that also attaches the path associated with the
/// error.
#[derive(Debug, Fail)]
pub struct FsError {
#[fail(cause)]
inner: io::Error,
path: PathBuf,
}
impl FsError {
/// Wraps `inner`, recording the path that the failed operation touched.
pub fn new<P: Into<PathBuf>>(inner: io::Error, path: P) -> FsError {
FsError {
inner,
path: path.into(),
}
}
/// The kind of the underlying io::Error, for matching (e.g. NotFound).
pub fn kind(&self) -> FsErrorKind {
self.inner.kind()
}
}
impl fmt::Display for FsError {
    /// Formats as "<path>: <underlying io error>".
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{}: {}", self.path.display(), self.inner)
    }
}

View File

@@ -0,0 +1,32 @@
use std::{
io,
path::{Path, PathBuf},
};
use crossbeam_channel::Receiver;
/// The two kinds of filesystem entry the Imfs distinguishes.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileType {
File,
Directory,
}
// TODO: Use our own event type instead of notify's.
pub type ImfsEvent = notify::DebouncedEvent;
/// The generic interface that `Imfs` uses to lazily read files from the disk.
/// In tests, it's stubbed out to do different versions of absolutely nothing
/// depending on the test.
pub trait ImfsFetcher {
/// Classifies `path` as a file or directory.
fn file_type(&mut self, path: &Path) -> io::Result<FileType>;
/// Lists the immediate children of the directory at `path`.
fn read_children(&mut self, path: &Path) -> io::Result<Vec<PathBuf>>;
/// Reads the full contents of the file at `path`.
fn read_contents(&mut self, path: &Path) -> io::Result<Vec<u8>>;
/// Creates a single directory at `path`.
fn create_directory(&mut self, path: &Path) -> io::Result<()>;
/// Writes `contents` to the file at `path`, replacing it.
fn write_file(&mut self, path: &Path, contents: &[u8]) -> io::Result<()>;
/// Removes the entry at `path` (recursively for directories).
fn remove(&mut self, path: &Path) -> io::Result<()>;
/// Begins watching `path` for changes; implementations may no-op.
fn watch(&mut self, path: &Path);
/// Stops watching `path`; implementations may no-op.
fn unwatch(&mut self, path: &Path);
/// A channel on which filesystem change events are delivered.
fn receiver(&self) -> Receiver<ImfsEvent>;
}

530
server/src/imfs/imfs.rs Normal file
View File

@@ -0,0 +1,530 @@
use std::{
io,
path::{Path, PathBuf},
};
use crossbeam_channel::Receiver;
use crate::path_map::PathMap;
use super::{
snapshot::ImfsSnapshot,
error::{FsResult, FsError},
fetcher::{ImfsFetcher, FileType, ImfsEvent},
};
/// An in-memory filesystem that can be incrementally populated and updated as
/// filesystem modification events occur.
///
/// All operations on the `Imfs` are lazy and do I/O as late as they can to
/// avoid reading extraneous files or directories from the disk. This means that
/// they all take `self` mutably, and means that it isn't possible to hold
/// references to the internal state of the Imfs while traversing it!
///
/// Most operations return `ImfsEntry` objects to work around this, which is
/// effectively an index into the `Imfs`.
pub struct Imfs<F> {
// Lazily-populated tree of items, keyed by absolute path.
inner: PathMap<ImfsItem>,
// Source of truth for disk reads and change events.
fetcher: F,
}
impl<F: ImfsFetcher> Imfs<F> {
/// Constructs an empty Imfs backed by the given fetcher.
pub fn new(fetcher: F) -> Imfs<F> {
Imfs {
inner: PathMap::new(),
fetcher,
}
}
/// Exposes the fetcher's change-event channel so callers can select on it.
pub fn change_receiver(&self) -> Receiver<ImfsEvent> {
self.fetcher.receiver()
}
/// Drains every event currently queued on the fetcher's channel, applies
/// each to the in-memory state, and returns the events that were applied.
pub fn commit_pending_changes(&mut self) -> FsResult<Vec<ImfsEvent>> {
    let receiver = self.fetcher.receiver();
    let mut processed = Vec::new();

    loop {
        match receiver.try_recv() {
            Ok(event) => {
                self.commit_change(&event)?;
                processed.push(event);
            }
            // Empty (or disconnected) channel: nothing more to drain.
            Err(_) => break,
        }
    }

    Ok(processed)
}
/// Applies a single filesystem event from notify to the in-memory state.
pub fn commit_change(&mut self, event: &ImfsEvent) -> FsResult<()> {
use notify::DebouncedEvent::*;
log::trace!("Committing Imfs change {:?}", event);
match event {
// Creation and modification are handled identically: re-check the
// path's type and refresh or invalidate what we hold for it.
Create(path) => {
self.raise_file_changed(path)?;
}
Write(path) => {
self.raise_file_changed(path)?;
}
Remove(path) => {
self.raise_file_removed(path)?;
}
// A rename is modeled as remove-old + change-new.
Rename(from_path, to_path) => {
self.raise_file_removed(from_path)?;
self.raise_file_changed(to_path)?;
}
Error(err, path) => {
log::warn!("Filesystem error detected: {:?} on path {:?}", err, path);
}
Rescan => {
// FIXME: Implement rescanning
log::warn!("Unhandled filesystem rescan event");
}
// Pre-notification and permission-change events carry no state we track.
NoticeWrite(_) | NoticeRemove(_) | Chmod(_) => {}
}
Ok(())
}
/// Populates the Imfs at `path` from an in-memory snapshot, recursing into
/// directory snapshots. Used primarily by tests to avoid touching disk.
pub fn load_from_snapshot(&mut self, path: impl AsRef<Path>, snapshot: ImfsSnapshot) {
let path = path.as_ref();
match snapshot {
ImfsSnapshot::File(file) => {
// Snapshot files arrive with contents already loaded.
self.inner.insert(path.to_path_buf(), ImfsItem::File(ImfsFile {
path: path.to_path_buf(),
contents: Some(file.contents),
}));
}
ImfsSnapshot::Directory(directory) => {
// Snapshot directories are complete, so mark them enumerated.
self.inner.insert(path.to_path_buf(), ImfsItem::Directory(ImfsDirectory {
path: path.to_path_buf(),
children_enumerated: true,
}));
for (child_name, child) in directory.children.into_iter() {
self.load_from_snapshot(path.join(child_name), child);
}
}
}
}
/// Handles a create/write event for `path`: refreshes or replaces the
/// resident item depending on how its on-disk type compares to what we
/// hold. Events for paths that would not be resident are ignored.
fn raise_file_changed(&mut self, path: impl AsRef<Path>) -> FsResult<()> {
let path = path.as_ref();
if !self.would_be_resident(path) {
return Ok(());
}
let new_type = self.fetcher.file_type(path)
.map_err(|err| FsError::new(err, path.to_path_buf()))?;
match self.inner.get_mut(path) {
Some(existing_item) => {
match (existing_item, &new_type) {
(ImfsItem::File(existing_file), FileType::File) => {
// Invalidate the existing file contents.
// We can probably be smarter about this by reading the changed file.
existing_file.contents = None;
}
(ImfsItem::Directory(_), FileType::Directory) => {
// No changes required, a directory updating doesn't mean anything to us.
self.fetcher.watch(path);
}
// Type flipped file -> directory: replace the node and watch it.
(ImfsItem::File(_), FileType::Directory) => {
self.inner.remove(path);
self.inner.insert(path.to_path_buf(), ImfsItem::new_from_type(FileType::Directory, path));
self.fetcher.watch(path);
}
// Type flipped directory -> file: replace the node and stop watching.
(ImfsItem::Directory(_), FileType::File) => {
self.inner.remove(path);
self.inner.insert(path.to_path_buf(), ImfsItem::new_from_type(FileType::File, path));
self.fetcher.unwatch(path);
}
}
}
None => {
// Path wasn't resident yet but its parent tracks it; create a stub.
self.inner.insert(path.to_path_buf(), ImfsItem::new_from_type(new_type, path));
}
}
Ok(())
}
/// Handles a remove event for `path`: drops the resident item (and its
/// descendants, via PathMap::remove) and stops watching it. Events for
/// paths that would not be resident are ignored.
fn raise_file_removed(&mut self, path: impl AsRef<Path>) -> FsResult<()> {
    let path = path.as_ref();

    if self.would_be_resident(path) {
        self.inner.remove(path);
        self.fetcher.unwatch(path);
    }

    Ok(())
}
/// Returns a detached handle (`ImfsEntry`) for `path`, reading the path
/// into memory first if it isn't resident yet.
pub fn get(&mut self, path: impl AsRef<Path>) -> FsResult<ImfsEntry> {
    let path = path.as_ref();
    self.read_if_not_exists(path)?;

    // read_if_not_exists guarantees the entry exists on success.
    let item = self.inner.get(path).unwrap();
    let is_file = if let ImfsItem::File(_) = item { true } else { false };

    Ok(ImfsEntry {
        path: item.path().to_path_buf(),
        is_file,
    })
}
/// Returns the contents of the file at `path`, reading them through the
/// fetcher on first access and caching them afterwards. Errors if the
/// path names a directory.
pub fn get_contents(&mut self, path: impl AsRef<Path>) -> FsResult<&[u8]> {
let path = path.as_ref();
self.read_if_not_exists(path)?;
match self.inner.get_mut(path).unwrap() {
ImfsItem::File(file) => {
// Contents are lazily loaded; None means "not read yet".
if file.contents.is_none() {
file.contents = Some(self.fetcher.read_contents(path)
.map_err(|err| FsError::new(err, path.to_path_buf()))?);
}
Ok(file.contents.as_ref().unwrap())
}
ImfsItem::Directory(_) => Err(FsError::new(io::Error::new(io::ErrorKind::Other, "Can't read a directory"), path.to_path_buf()))
}
}
pub fn get_children(&mut self, path: impl AsRef<Path>) -> FsResult<Vec<ImfsEntry>> {
let path = path.as_ref();
self.read_if_not_exists(path)?;
match self.inner.get(path).unwrap() {
ImfsItem::Directory(dir) => {
self.fetcher.watch(path);
if dir.children_enumerated {
return self.inner.children(path)
.unwrap() // TODO: Handle None here, which means the PathMap entry did not exist.
.into_iter()
.map(PathBuf::from) // Convert paths from &Path to PathBuf
.collect::<Vec<PathBuf>>() // Collect all PathBufs, since self.get needs to borrow self mutably.
.into_iter()
.map(|path| self.get(path))
.collect::<FsResult<Vec<ImfsEntry>>>();
}
self.fetcher.read_children(path)
.map_err(|err| FsError::new(err, path.to_path_buf()))?
.into_iter()
.map(|path| self.get(path))
.collect::<FsResult<Vec<ImfsEntry>>>()
}
ImfsItem::File(_) => Err(FsError::new(io::Error::new(io::ErrorKind::Other, "Can't read a directory"), path.to_path_buf()))
}
}
/// Tells whether the given path, if it were loaded, would be loaded if it
/// existed.
///
/// Returns true if the path is loaded or if its parent is loaded, is a
/// directory, and is marked as having been enumerated before.
///
/// This idea corresponds to whether a file change event should result in
/// tangible changes to the in-memory filesystem. If a path would be
/// resident, we need to read it, and if its contents were known before, we
/// need to update them.
fn would_be_resident(&self, path: &Path) -> bool {
if self.inner.contains_key(path) {
return true;
}
if let Some(parent) = path.parent() {
if let Some(ImfsItem::Directory(dir)) = self.inner.get(parent) {
// NOTE(review): this returns true when the parent has NOT been
// enumerated, which appears to contradict the doc comment above
// ("is marked as having been enumerated before") — confirm the
// intended polarity before changing either.
return !dir.children_enumerated;
}
}
false
}
/// Attempts to read the path into the `Imfs` if it doesn't exist.
///
/// This does not necessitate that file contents or directory children will
/// be read. Depending on the `ImfsFetcher` implementation that the `Imfs`
/// is using, this call may read exactly only the given path and no more.
fn read_if_not_exists(&mut self, path: &Path) -> FsResult<()> {
if !self.inner.contains_key(path) {
let kind = self.fetcher.file_type(path)
.map_err(|err| FsError::new(err, path.to_path_buf()))?;
// Directories get watched as soon as they become resident.
if kind == FileType::Directory {
self.fetcher.watch(path);
}
self.inner.insert(path.to_path_buf(), ImfsItem::new_from_type(kind, path));
}
Ok(())
}
}
/// A reference to file or folder in an `Imfs`. Can only be produced by the
/// entry existing in the Imfs, but can later point to nothing if something
/// would invalidate that path.
///
/// This struct does not borrow from the Imfs since every operation has the
/// possibility to mutate the underlying data structure and move memory around.
pub struct ImfsEntry {
// Absolute path this entry pointed at when it was created.
path: PathBuf,
// Snapshot of the item's kind at creation time (true = file).
is_file: bool,
}
impl ImfsEntry {
/// The absolute path this entry refers to.
pub fn path(&self) -> &Path {
&self.path
}
/// Reads (and caches) the file's contents through the given Imfs.
/// Errors if the path now names a directory or no longer exists.
pub fn contents<'imfs>(
&self,
imfs: &'imfs mut Imfs<impl ImfsFetcher>,
) -> FsResult<&'imfs [u8]> {
imfs.get_contents(&self.path)
}
/// Enumerates the directory's children through the given Imfs.
/// Errors if the path now names a file or no longer exists.
pub fn children(
&self,
imfs: &mut Imfs<impl ImfsFetcher>,
) -> FsResult<Vec<ImfsEntry>> {
imfs.get_children(&self.path)
}
/// Whether the entry was a file when it was created.
pub fn is_file(&self) -> bool {
self.is_file
}
/// Whether the entry was a directory when it was created.
pub fn is_directory(&self) -> bool {
!self.is_file
}
}
/// Internal structure describing potentially partially-resident files and
/// folders in the `Imfs`.
pub enum ImfsItem {
File(ImfsFile),
Directory(ImfsDirectory),
}
impl ImfsItem {
    /// The absolute path this item represents, regardless of variant.
    fn path(&self) -> &Path {
        match self {
            ImfsItem::Directory(dir) => &dir.path,
            ImfsItem::File(file) => &file.path,
        }
    }

    /// Builds an empty, not-yet-populated item of the given kind: files
    /// start with no cached contents, directories start un-enumerated.
    fn new_from_type(kind: FileType, path: impl Into<PathBuf>) -> ImfsItem {
        let path = path.into();

        match kind {
            FileType::File => ImfsItem::File(ImfsFile {
                path,
                contents: None,
            }),
            FileType::Directory => ImfsItem::Directory(ImfsDirectory {
                path,
                children_enumerated: false,
            }),
        }
    }
}
/// A lazily-loaded file; `contents` is None until someone reads it.
pub struct ImfsFile {
pub(super) path: PathBuf,
pub(super) contents: Option<Vec<u8>>,
}
/// A lazily-enumerated directory; `children_enumerated` records whether
/// its children have ever been listed.
pub struct ImfsDirectory {
pub(super) path: PathBuf,
pub(super) children_enumerated: bool,
}
#[cfg(test)]
mod test {
use super::*;
use std::{
rc::Rc,
cell::RefCell,
};
use crossbeam_channel::Receiver;
use maplit::hashmap;
use super::super::{
noop_fetcher::NoopFetcher,
error::FsErrorKind,
fetcher::ImfsEvent,
};
#[test]
// A file loaded from a snapshot should serve its contents without touching
// the fetcher (NoopFetcher would error on any disk access).
fn from_snapshot_file() {
let mut imfs = Imfs::new(NoopFetcher);
let file = ImfsSnapshot::file("hello, world!");
imfs.load_from_snapshot("/hello.txt", file);
let entry = imfs.get_contents("/hello.txt").unwrap();
assert_eq!(entry, b"hello, world!");
}
#[test]
// A directory snapshot should be enumerable and its files readable, all
// without any fetcher I/O.
fn from_snapshot_dir() {
let mut imfs = Imfs::new(NoopFetcher);
let dir = ImfsSnapshot::dir(hashmap! {
"a.txt" => ImfsSnapshot::file("contents of a.txt"),
"b.lua" => ImfsSnapshot::file("contents of b.lua"),
});
imfs.load_from_snapshot("/dir", dir);
let children = imfs.get_children("/dir").unwrap();
// get_children's ordering is unspecified, so check membership flags.
let mut has_a = false;
let mut has_b = false;
for child in children.into_iter() {
if child.path() == Path::new("/dir/a.txt") {
has_a = true;
} else if child.path() == Path::new("/dir/b.lua") {
has_b = true;
} else {
panic!("Unexpected child in /dir");
}
}
assert!(has_a, "/dir/a.txt was missing");
assert!(has_b, "/dir/b.lua was missing");
let a = imfs.get_contents("/dir/a.txt").unwrap();
assert_eq!(a, b"contents of a.txt");
let b = imfs.get_contents("/dir/b.lua").unwrap();
assert_eq!(b, b"contents of b.lua");
}
#[test]
// A change event should invalidate cached file contents so the next read
// goes back through the fetcher and observes the new data.
fn changed_event() {
// Mutable state shared between the test body and the mock fetcher.
#[derive(Default)]
struct MockState {
a_contents: &'static str,
}
// Fetcher that serves exactly one file, /dir/a.txt, from MockState.
struct MockFetcher {
inner: Rc<RefCell<MockState>>,
}
impl ImfsFetcher for MockFetcher {
fn file_type(&mut self, path: &Path) -> io::Result<FileType> {
if path == Path::new("/dir/a.txt") {
return Ok(FileType::File);
}
unimplemented!();
}
fn read_contents(&mut self, path: &Path) -> io::Result<Vec<u8>> {
if path == Path::new("/dir/a.txt") {
let inner = self.inner.borrow();
return Ok(Vec::from(inner.a_contents));
}
unimplemented!();
}
// The remaining operations are unreachable in this test.
fn read_children(&mut self, _path: &Path) -> io::Result<Vec<PathBuf>> {
unimplemented!();
}
fn create_directory(&mut self, _path: &Path) -> io::Result<()> {
unimplemented!();
}
fn write_file(&mut self, _path: &Path, _contents: &[u8]) -> io::Result<()> {
unimplemented!();
}
fn remove(&mut self, _path: &Path) -> io::Result<()> {
unimplemented!();
}
fn watch(&mut self, _path: &Path) {
}
fn unwatch(&mut self, _path: &Path) {
}
fn receiver(&self) -> Receiver<ImfsEvent> {
crossbeam_channel::never()
}
}
let mock_state = Rc::new(RefCell::new(MockState {
a_contents: "Initial contents",
}));
let mut imfs = Imfs::new(MockFetcher {
inner: mock_state.clone(),
});
// First read caches the initial contents.
let a = imfs.get("/dir/a.txt")
.expect("mock file did not exist");
let contents = a.contents(&mut imfs)
.expect("mock file contents error");
assert_eq!(contents, b"Initial contents");
// Mutate the backing state, then signal a change event.
{
let mut mock_state = mock_state.borrow_mut();
mock_state.a_contents = "Changed contents";
}
imfs.raise_file_changed("/dir/a.txt")
.expect("error processing file change");
// The cache was invalidated, so this re-reads through the fetcher.
let contents = a.contents(&mut imfs)
.expect("mock file contents error");
assert_eq!(contents, b"Changed contents");
}
#[test]
// A remove event should evict the file so subsequent lookups fail with
// NotFound (NoopFetcher reports NotFound for everything not resident).
//
// Bug fix: the final lookup queried "hello.txt" (relative, no leading
// slash) instead of "/hello.txt", so it only passed because NoopFetcher
// rejects *every* path — it never actually checked that the loaded path
// was removed. It now queries the same path that was loaded and removed.
fn removed_event_existing() {
    let mut imfs = Imfs::new(NoopFetcher);

    let file = ImfsSnapshot::file("hello, world!");
    imfs.load_from_snapshot("/hello.txt", file);

    let hello = imfs.get("/hello.txt")
        .expect("couldn't get hello.txt");

    let contents = hello.contents(&mut imfs)
        .expect("couldn't get hello.txt contents");

    assert_eq!(contents, b"hello, world!");

    imfs.raise_file_removed("/hello.txt")
        .expect("error processing file removal");

    match imfs.get("/hello.txt") {
        Err(ref err) if err.kind() == FsErrorKind::NotFound => {}
        Ok(_) => {
            panic!("hello.txt was not removed from Imfs");
        }
        Err(err) => {
            panic!("Unexpected error: {:?}", err);
        }
    }
}
}

17
server/src/imfs/mod.rs Normal file
View File

@@ -0,0 +1,17 @@
// Private submodules making up the Imfs implementation.
mod error;
mod fetcher;
mod imfs;
mod noop_fetcher;
mod real_fetcher;
mod snapshot;
// The error types are shared between the old and new Imfs.
pub use error::*;
// Transitional namespace: the new Imfs is exposed under `imfs::new` while
// the old implementation is phased out. NOTE(review): presumably temporary
// scaffolding for the migration — confirm before depending on the path.
pub mod new {
pub use super::error::*;
pub use super::imfs::*;
pub use super::fetcher::*;
pub use super::real_fetcher::*;
pub use super::noop_fetcher::*;
pub use super::snapshot::*;
}

View File

@@ -0,0 +1,49 @@
//! Implements the IMFS fetcher interface for a fake filesystem using Rust's
//! std::fs interface.
use std::{
io,
path::{Path, PathBuf},
};
use crossbeam_channel::Receiver;
use super::fetcher::{ImfsFetcher, FileType, ImfsEvent};
/// Fetcher stub for tests: every read reports NotFound, every write
/// silently succeeds, and no events are ever delivered.
pub struct NoopFetcher;
impl ImfsFetcher for NoopFetcher {
fn file_type(&mut self, _path: &Path) -> io::Result<FileType> {
Err(io::Error::new(io::ErrorKind::NotFound, "NoopFetcher always returns NotFound"))
}
fn read_children(&mut self, _path: &Path) -> io::Result<Vec<PathBuf>> {
Err(io::Error::new(io::ErrorKind::NotFound, "NoopFetcher always returns NotFound"))
}
fn read_contents(&mut self, _path: &Path) -> io::Result<Vec<u8>> {
Err(io::Error::new(io::ErrorKind::NotFound, "NoopFetcher always returns NotFound"))
}
// Mutating operations are accepted and discarded.
fn create_directory(&mut self, _path: &Path) -> io::Result<()> {
Ok(())
}
fn write_file(&mut self, _path: &Path, _contents: &[u8]) -> io::Result<()> {
Ok(())
}
fn remove(&mut self, _path: &Path) -> io::Result<()> {
Ok(())
}
fn watch(&mut self, _path: &Path) {
}
fn unwatch(&mut self, _path: &Path) {
}
// A channel that never yields, so commit_pending_changes drains nothing.
fn receiver(&self) -> Receiver<ImfsEvent> {
crossbeam_channel::never()
}
}

View File

@@ -0,0 +1,155 @@
//! Implements the IMFS fetcher interface for the real filesystem using Rust's
//! std::fs interface and notify as the file watcher.
use std::{
fs,
io,
path::{Path, PathBuf},
sync::mpsc,
time::Duration,
};
use jod_thread::JoinHandle;
use crossbeam_channel::{Receiver, unbounded};
use notify::{RecursiveMode, RecommendedWatcher, Watcher};
use super::fetcher::{ImfsFetcher, FileType, ImfsEvent};
/// Workaround to disable the file watcher for processes that don't need it,
/// since notify appears to hang on to mpsc Sender objects too long, causing
/// Rojo to deadlock on drop.
///
/// We can make constructing the watcher optional in order to hotfix rojo build.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum WatchMode {
Enabled,
Disabled,
}
/// Fetcher backed by the real filesystem (std::fs) plus a notify watcher.
pub struct RealFetcher {
// Drop order is relevant here!
//
// `watcher` must be dropped before `_converter_thread` or else joining the
// thread will cause a deadlock.
watcher: Option<RecommendedWatcher>,
/// Thread handle to convert notify's mpsc channel messages into
/// crossbeam_channel messages.
_converter_thread: JoinHandle<()>,
// Crossbeam side of the converted event stream, cloned out by receiver().
receiver: Receiver<ImfsEvent>,
}
impl RealFetcher {
/// Starts the fetcher, spawning the mpsc -> crossbeam converter thread
/// and, when `watch_mode` is Enabled, a debounced notify watcher.
pub fn new(watch_mode: WatchMode) -> RealFetcher {
log::trace!("Starting RealFetcher with watch mode {:?}", watch_mode);
// notify only speaks std::sync::mpsc; bridge it to crossbeam so the
// rest of the program can select over the channel.
let (notify_sender, notify_receiver) = mpsc::channel();
let (sender, receiver) = unbounded();
// jod_thread joins on drop, which is why the struct's drop order
// (watcher first) matters — the thread only exits once notify drops
// its sender and the mpsc iterator ends.
let handle = jod_thread::Builder::new()
.name("notify message converter".to_owned())
.spawn(move || {
notify_receiver
.into_iter()
.for_each(|event| { sender.send(event).unwrap() });
})
.expect("Could not start message converter thread");
// TODO: Investigate why notify hangs onto notify_sender too long,
// causing our program to deadlock. Once this is fixed, watcher no
// longer needs to be optional, but is still maybe useful?
let watcher = match watch_mode {
WatchMode::Enabled => {
// 300ms debounce window for coalescing bursts of FS events.
Some(notify::watcher(notify_sender, Duration::from_millis(300))
.expect("Couldn't start 'notify' file watcher"))
}
WatchMode::Disabled => None,
};
RealFetcher {
watcher,
_converter_thread: handle,
receiver,
}
}
}
impl ImfsFetcher for RealFetcher {
/// Classifies a path via fs::metadata (which follows symlinks).
/// NOTE(review): anything that isn't a regular file is reported as a
/// Directory — confirm that's acceptable for special files.
fn file_type(&mut self, path: &Path) -> io::Result<FileType> {
let metadata = fs::metadata(path)?;
if metadata.is_file() {
Ok(FileType::File)
} else {
Ok(FileType::Directory)
}
}
/// Lists the immediate children of a directory.
fn read_children(&mut self, path: &Path) -> io::Result<Vec<PathBuf>> {
log::trace!("Reading directory {}", path.display());
let mut result = Vec::new();
let iter = fs::read_dir(path)?;
for entry in iter {
result.push(entry?.path());
}
Ok(result)
}
/// Reads a file's entire contents.
fn read_contents(&mut self, path: &Path) -> io::Result<Vec<u8>> {
log::trace!("Reading file {}", path.display());
fs::read(path)
}
/// Creates a single (non-recursive) directory.
fn create_directory(&mut self, path: &Path) -> io::Result<()> {
log::trace!("Creating directory {}", path.display());
fs::create_dir(path)
}
/// Writes (replaces) a file's contents.
fn write_file(&mut self, path: &Path, contents: &[u8]) -> io::Result<()> {
log::trace!("Writing path {}", path.display());
fs::write(path, contents)
}
/// Removes a file, or a directory tree recursively.
fn remove(&mut self, path: &Path) -> io::Result<()> {
log::trace!("Removing path {}", path.display());
let metadata = fs::metadata(path)?;
if metadata.is_file() {
fs::remove_file(path)
} else {
fs::remove_dir_all(path)
}
}
/// Watches a single directory level; failures are logged, not raised.
/// No-op when the watcher is disabled.
fn watch(&mut self, path: &Path) {
log::trace!("Watching path {}", path.display());
if let Some(watcher) = self.watcher.as_mut() {
if let Err(err) = watcher.watch(path, RecursiveMode::NonRecursive) {
log::warn!("Couldn't watch path {}: {:?}", path.display(), err);
}
}
}
/// Stops watching a path; failures are logged, not raised.
fn unwatch(&mut self, path: &Path) {
log::trace!("Stopped watching path {}", path.display());
if let Some(watcher) = self.watcher.as_mut() {
if let Err(err) = watcher.unwatch(path) {
log::warn!("Couldn't unwatch path {}: {:?}", path.display(), err);
}
}
}
/// Hands out a clone of the crossbeam side of the event bridge.
fn receiver(&self) -> Receiver<ImfsEvent> {
self.receiver.clone()
}
}

View File

@@ -0,0 +1,38 @@
use std::collections::HashMap;
/// A fully-in-memory description of a file tree, used to pre-populate an
/// Imfs (primarily in tests) without touching disk.
#[derive(Debug, Clone)]
pub enum ImfsSnapshot {
File(FileSnapshot),
Directory(DirectorySnapshot),
}
impl ImfsSnapshot {
    /// Create a new file ImfsSnapshot with the given contents.
    pub fn file(contents: impl Into<Vec<u8>>) -> ImfsSnapshot {
        let snapshot = FileSnapshot {
            contents: contents.into(),
        };

        ImfsSnapshot::File(snapshot)
    }

    /// Create a new directory ImfsSnapshot with the given children,
    /// converting each child name into an owned String.
    pub fn dir<S: Into<String>>(children: HashMap<S, ImfsSnapshot>) -> ImfsSnapshot {
        let mut converted = HashMap::new();

        for (name, child) in children {
            converted.insert(name.into(), child);
        }

        ImfsSnapshot::Directory(DirectorySnapshot {
            children: converted,
        })
    }
}
/// Snapshot of a single file's contents.
#[derive(Debug, Clone)]
pub struct FileSnapshot {
pub contents: Vec<u8>,
}
/// Snapshot of a directory: child name -> child snapshot.
#[derive(Debug, Clone)]
pub struct DirectorySnapshot {
pub children: HashMap<String, ImfsSnapshot>,
}

View File

@@ -2,20 +2,18 @@
// Macros // Macros
#[macro_use] #[macro_use]
pub mod impl_from; mod impl_from;
// Other modules // Other modules
pub mod commands; pub mod commands;
pub mod fs_watcher;
pub mod imfs;
pub mod live_session;
pub mod message_queue;
pub mod path_map;
pub mod path_serializer;
pub mod project; pub mod project;
pub mod rbx_session;
pub mod rbx_snapshot; mod imfs;
pub mod session_id; mod message_queue;
pub mod snapshot_reconciler; mod path_map;
pub mod visualize; mod path_serializer;
pub mod web; mod serve_session;
mod session_id;
mod snapshot;
mod snapshot_middleware;
mod web;

View File

@@ -1,99 +0,0 @@
use std::{
collections::HashSet,
mem,
sync::{Arc, Mutex},
};
use failure::Fail;
use crate::{
fs_watcher::FsWatcher,
imfs::{Imfs, FsError},
message_queue::MessageQueue,
project::Project,
rbx_session::RbxSession,
rbx_snapshot::SnapshotError,
session_id::SessionId,
snapshot_reconciler::InstanceChanges,
};
/// Errors that can occur while constructing or running a live session,
/// unifying filesystem and snapshot failures.
#[derive(Debug, Fail)]
pub enum LiveSessionError {
#[fail(display = "{}", _0)]
Fs(#[fail(cause)] FsError),
#[fail(display = "{}", _0)]
Snapshot(#[fail(cause)] SnapshotError),
}
// Generates From impls so `?` converts the inner errors automatically.
impl_from!(LiveSessionError {
FsError => Fs,
SnapshotError => Snapshot,
});
/// Contains all of the state for a Rojo live-sync session.
pub struct LiveSession {
project: Arc<Project>,
session_id: SessionId,
pub message_queue: Arc<MessageQueue<InstanceChanges>>,
pub rbx_session: Arc<Mutex<RbxSession>>,
pub imfs: Arc<Mutex<Imfs>>,
// Held only so the watcher stays alive for the session's lifetime.
_fs_watcher: FsWatcher,
}
impl LiveSession {
/// Builds a session: populates an Imfs from the project's sync points,
/// constructs the Rbx session over it, and starts the FS watcher.
pub fn new(project: Arc<Project>) -> Result<LiveSession, LiveSessionError> {
let imfs = {
let mut imfs = Imfs::new();
imfs.add_roots_from_project(&project)?;
Arc::new(Mutex::new(imfs))
};
let message_queue = Arc::new(MessageQueue::new());
let rbx_session = Arc::new(Mutex::new(RbxSession::new(
Arc::clone(&project),
Arc::clone(&imfs),
Arc::clone(&message_queue),
)?));
let fs_watcher = FsWatcher::start(
Arc::clone(&imfs),
Some(Arc::clone(&rbx_session)),
);
let session_id = SessionId::new();
Ok(LiveSession {
session_id,
project,
message_queue,
rbx_session,
imfs,
_fs_watcher: fs_watcher,
})
}
/// Restarts the live session using the given project while preserving the
/// internal session ID.
pub fn restart_with_new_project(&mut self, project: Arc<Project>) -> Result<(), LiveSessionError> {
let mut new_session = LiveSession::new(project)?;
new_session.session_id = self.session_id;
// Swap in the new session; the replaced old session is dropped here.
mem::replace(self, new_session);
Ok(())
}
/// The root project this session serves.
pub fn root_project(&self) -> &Project {
&self.project
}
/// The (restart-stable) identifier for this session.
pub fn session_id(&self) -> SessionId {
self.session_id
}
/// Optional allow-list of Roblox place IDs this session may serve.
pub fn serve_place_ids(&self) -> &Option<HashSet<u64>> {
&self.project.serve_place_ids
}
}

View File

@@ -18,6 +18,14 @@ struct PathMapNode<T> {
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct PathMap<T> { pub struct PathMap<T> {
nodes: HashMap<PathBuf, PathMapNode<T>>, nodes: HashMap<PathBuf, PathMapNode<T>>,
/// Contains the set of all paths whose parent either does not exist, or is
/// not present in the PathMap.
///
/// Note that these paths may have other _ancestors_ in the tree, but if an
/// orphan's parent path is ever inserted, it will stop being an orphan. It
/// will be... adopted!
orphan_paths: HashSet<PathBuf>,
} }
impl<T> Default for PathMap<T> { impl<T> Default for PathMap<T> {
@@ -30,49 +38,71 @@ impl<T> PathMap<T> {
pub fn new() -> PathMap<T> { pub fn new() -> PathMap<T> {
PathMap { PathMap {
nodes: HashMap::new(), nodes: HashMap::new(),
orphan_paths: HashSet::new(),
} }
} }
pub fn get(&self, path: &Path) -> Option<&T> { pub fn get(&self, path: impl AsRef<Path>) -> Option<&T> {
self.nodes.get(path).map(|v| &v.value) self.nodes.get(path.as_ref()).map(|v| &v.value)
} }
pub fn get_mut(&mut self, path: &Path) -> Option<&mut T> { pub fn get_mut(&mut self, path: impl AsRef<Path>) -> Option<&mut T> {
self.nodes.get_mut(path).map(|v| &mut v.value) self.nodes.get_mut(path.as_ref()).map(|v| &mut v.value)
} }
pub fn insert(&mut self, path: PathBuf, value: T) { pub fn children(&self, path: impl AsRef<Path>) -> Option<Vec<&Path>> {
if let Some(parent_path) = path.parent() { self.nodes.get(path.as_ref()).map(|v| v.children.iter().map(AsRef::as_ref).collect())
if let Some(parent) = self.nodes.get_mut(parent_path) { }
parent.children.insert(path.to_path_buf());
pub fn contains_key(&self, path: impl AsRef<Path>) -> bool {
self.nodes.contains_key(path.as_ref())
}
pub fn insert(&mut self, path: impl Into<PathBuf>, value: T) {
let path = path.into();
self.add_to_parent(path.clone());
// Collect any children that are currently marked as orphaned paths, but
// are actually children of this new node.
let mut children = HashSet::new();
for orphan_path in &self.orphan_paths {
if orphan_path.parent() == Some(&path) {
children.insert(orphan_path.clone());
} }
} }
for child in &children {
self.orphan_paths.remove(child);
}
self.nodes.insert(path, PathMapNode { self.nodes.insert(path, PathMapNode {
value, value,
children: HashSet::new(), children,
}); });
} }
pub fn remove(&mut self, root_path: &Path) -> Option<T> { /// Remove the given path and all of its linked descendants, returning all
if let Some(parent_path) = root_path.parent() { /// values stored in the map.
if let Some(parent) = self.nodes.get_mut(parent_path) { pub fn remove(&mut self, root_path: impl AsRef<Path>) -> Vec<(PathBuf, T)> {
parent.children.remove(root_path); let root_path = root_path.as_ref();
}
}
let mut root_node = match self.nodes.remove(root_path) { self.remove_from_parent(root_path);
let (root_path, root_node) = match self.nodes.remove_entry(root_path) {
Some(node) => node, Some(node) => node,
None => return None, None => return Vec::new(),
}; };
let root_value = root_node.value; let mut removed_entries = vec![(root_path, root_node.value)];
let mut to_visit: Vec<PathBuf> = root_node.children.drain().collect(); let mut to_visit: Vec<PathBuf> = root_node.children.into_iter().collect();
while let Some(path) = to_visit.pop() { while let Some(path) = to_visit.pop() {
match self.nodes.remove(&path) { match self.nodes.remove_entry(&path) {
Some(mut node) => { Some((path, node)) => {
for child in node.children.drain() { removed_entries.push((path, node.value));
for child in node.children.into_iter() {
to_visit.push(child); to_visit.push(child);
} }
}, },
@@ -82,7 +112,7 @@ impl<T> PathMap<T> {
} }
} }
Some(root_value) removed_entries
} }
/// Traverses the route between `start_path` and `target_path` and returns /// Traverses the route between `start_path` and `target_path` and returns
@@ -93,10 +123,13 @@ impl<T> PathMap<T> {
/// FS events, a file remove event could be followed by that file's /// FS events, a file remove event could be followed by that file's
/// directory being removed, in which case we should process that /// directory being removed, in which case we should process that
/// directory's parent. /// directory's parent.
pub fn descend(&self, start_path: &Path, target_path: &Path) -> PathBuf { pub fn descend(&self, start_path: impl Into<PathBuf>, target_path: impl AsRef<Path>) -> PathBuf {
let relative_path = target_path.strip_prefix(start_path) let start_path = start_path.into();
let target_path = target_path.as_ref();
let relative_path = target_path.strip_prefix(&start_path)
.expect("target_path did not begin with start_path"); .expect("target_path did not begin with start_path");
let mut current_path = start_path.to_path_buf(); let mut current_path = start_path;
for component in relative_path.components() { for component in relative_path.components() {
match component { match component {
@@ -115,4 +148,131 @@ impl<T> PathMap<T> {
current_path current_path
} }
/// Adds the path to its parent if it's present in the tree, or the set of
/// orphaned paths if it is not.
fn add_to_parent(&mut self, path: PathBuf) {
if let Some(parent_path) = path.parent() {
if let Some(parent) = self.nodes.get_mut(parent_path) {
parent.children.insert(path);
return;
}
}
// In this branch, the path is orphaned because it either doesn't have a
// parent according to Path, or because its parent doesn't exist in the
// PathMap.
self.orphan_paths.insert(path);
}
/// Removes the path from its parent, or from the orphaned paths set if it
/// has no parent.
fn remove_from_parent(&mut self, path: &Path) {
if let Some(parent_path) = path.parent() {
if let Some(parent) = self.nodes.get_mut(parent_path) {
parent.children.remove(path);
return;
}
}
// In this branch, the path is orphaned because it either doesn't have a
// parent according to Path, or because its parent doesn't exist in the
// PathMap.
self.orphan_paths.remove(path);
}
}
#[cfg(test)]
mod test {
use super::*;
use maplit::hashset;
#[test]
fn smoke_test() {
let mut map = PathMap::new();
assert_eq!(map.get("/foo"), None);
map.insert("/foo", 5);
assert_eq!(map.get("/foo"), Some(&5));
map.insert("/foo/bar", 6);
assert_eq!(map.get("/foo"), Some(&5));
assert_eq!(map.get("/foo/bar"), Some(&6));
assert_eq!(map.children("/foo"), Some(vec![Path::new("/foo/bar")]));
}
#[test]
fn orphans() {
let mut map = PathMap::new();
map.insert("/foo/bar", 5);
assert_eq!(map.orphan_paths, hashset!["/foo/bar".into()]);
map.insert("/foo", 6);
assert_eq!(map.orphan_paths, hashset!["/foo".into()]);
}
#[test]
fn remove_one() {
let mut map = PathMap::new();
map.insert("/foo", 6);
assert_eq!(map.remove("/foo"), vec![
(PathBuf::from("/foo"), 6),
]);
assert_eq!(map.get("/foo"), None);
}
#[test]
fn remove_child() {
let mut map = PathMap::new();
map.insert("/foo", 6);
map.insert("/foo/bar", 12);
assert_eq!(map.remove("/foo"), vec![
(PathBuf::from("/foo"), 6),
(PathBuf::from("/foo/bar"), 12),
]);
assert_eq!(map.get("/foo"), None);
assert_eq!(map.get("/foo/bar"), None);
}
#[test]
fn remove_descendant() {
let mut map = PathMap::new();
map.insert("/foo", 6);
map.insert("/foo/bar", 12);
map.insert("/foo/bar/baz", 18);
assert_eq!(map.remove("/foo"), vec![
(PathBuf::from("/foo"), 6),
(PathBuf::from("/foo/bar"), 12),
(PathBuf::from("/foo/bar/baz"), 18),
]);
assert_eq!(map.get("/foo"), None);
assert_eq!(map.get("/foo/bar"), None);
assert_eq!(map.get("/foo/bar/baz"), None);
}
#[test]
fn remove_not_orphan_descendants() {
let mut map = PathMap::new();
map.insert("/foo", 6);
map.insert("/foo/bar/baz", 12);
assert_eq!(map.remove("/foo"), vec![
(PathBuf::from("/foo"), 6),
]);
assert_eq!(map.get("/foo"), None);
assert_eq!(map.get("/foo/bar/baz"), Some(&12));
}
} }

View File

@@ -8,7 +8,7 @@
//! To use, annotate your PathBuf or Option<PathBuf> field with the correct //! To use, annotate your PathBuf or Option<PathBuf> field with the correct
//! serializer function: //! serializer function:
//! //!
//! ``` //! ```ignore
//! # use std::path::PathBuf; //! # use std::path::PathBuf;
//! # use serde::{Serialize, Deserialize}; //! # use serde::{Serialize, Deserialize};
//! //!

View File

@@ -465,6 +465,12 @@ impl Project {
Ok(parsed.into_project(project_file_location)) Ok(parsed.into_project(project_file_location))
} }
pub fn load_from_slice(contents: &[u8], project_file_location: &Path) -> Result<Project, serde_json::Error> {
let parsed: SourceProject = serde_json::from_slice(&contents)?;
Ok(parsed.into_project(project_file_location))
}
pub fn load_fuzzy(fuzzy_project_location: &Path) -> Result<Project, ProjectLoadError> { pub fn load_fuzzy(fuzzy_project_location: &Path) -> Result<Project, ProjectLoadError> {
if let Some(project_path) = Self::locate(fuzzy_project_location) { if let Some(project_path) = Self::locate(fuzzy_project_location) {
Self::load_exact(&project_path) Self::load_exact(&project_path)

View File

@@ -1,289 +0,0 @@
use std::{
borrow::Cow,
collections::{HashSet, HashMap},
path::{Path, PathBuf},
str,
sync::{Arc, Mutex},
};
use rlua::Lua;
use serde::{Serialize, Deserialize};
use log::{info, trace, error};
use rbx_dom_weak::{RbxTree, RbxId};
use crate::{
project::{Project, ProjectNode},
message_queue::MessageQueue,
imfs::{Imfs, ImfsItem},
path_map::PathMap,
rbx_snapshot::{
SnapshotError,
SnapshotContext,
SnapshotPluginContext,
SnapshotPluginEntry,
snapshot_project_tree,
snapshot_project_node,
snapshot_imfs_path,
},
snapshot_reconciler::{InstanceChanges, reify_root, reconcile_subtree},
};
const INIT_SCRIPT: &str = "init.lua";
const INIT_SERVER_SCRIPT: &str = "init.server.lua";
const INIT_CLIENT_SCRIPT: &str = "init.client.lua";
/// `source_path` or `project_definition` or both must both be Some.
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
pub struct MetadataPerInstance {
pub ignore_unknown_instances: bool,
/// The path on the filesystem that the instance was read from the
/// filesystem if it came from the filesystem.
#[serde(serialize_with = "crate::path_serializer::serialize_option")]
pub source_path: Option<PathBuf>,
/// Information about the instance that came from the project that defined
/// it, if that's where it was defined.
///
/// A key-value pair where the key should be the name of the instance and
/// the value is the ProjectNode from the instance's project.
pub project_definition: Option<(String, ProjectNode)>,
}
/// Contains all of the state needed to update an `RbxTree` in real time using
/// the in-memory filesystem, as well as messaging to Rojo clients what
/// instances have actually updated at any point.
pub struct RbxSession {
tree: RbxTree,
instances_per_path: PathMap<HashSet<RbxId>>,
metadata_per_instance: HashMap<RbxId, MetadataPerInstance>,
message_queue: Arc<MessageQueue<InstanceChanges>>,
imfs: Arc<Mutex<Imfs>>,
}
impl RbxSession {
pub fn new(
project: Arc<Project>,
imfs: Arc<Mutex<Imfs>>,
message_queue: Arc<MessageQueue<InstanceChanges>>,
) -> Result<RbxSession, SnapshotError> {
let mut instances_per_path = PathMap::new();
let mut metadata_per_instance = HashMap::new();
let plugin_context = if cfg!(feature = "server-plugins") {
let lua = Lua::new();
let mut callback_key = None;
lua.context(|context| {
let callback = context.load(r#"
return function(snapshot)
print("got my snapshot:", snapshot)
print("name:", snapshot.name, "class name:", snapshot.className)
end"#)
.set_name("a cool plugin").unwrap()
.call::<(), rlua::Function>(()).unwrap();
callback_key = Some(context.create_registry_value(callback).unwrap());
});
let plugins = vec![
SnapshotPluginEntry {
file_name_filter: String::new(),
callback: callback_key.unwrap(),
}
];
Some(SnapshotPluginContext { lua, plugins })
} else {
None
};
let context = SnapshotContext {
plugin_context,
};
let tree = {
let temp_imfs = imfs.lock().unwrap();
reify_initial_tree(
&project,
&context,
&temp_imfs,
&mut instances_per_path,
&mut metadata_per_instance,
)?
};
Ok(RbxSession {
tree,
instances_per_path,
metadata_per_instance,
message_queue,
imfs,
})
}
fn path_created_or_updated(&mut self, path: &Path) {
// TODO: Track paths actually updated in each step so we can ignore
// redundant changes.
let mut changes = InstanceChanges::default();
{
let imfs = self.imfs.lock().unwrap();
let root_path = imfs.get_root_for_path(path)
.expect("Path was outside in-memory filesystem roots");
// Find the closest instance in the tree that currently exists
let mut path_to_snapshot = self.instances_per_path.descend(root_path, path);
// If this is a file that might affect its parent if modified, we
// should snapshot its parent instead.
match path_to_snapshot.file_name().unwrap().to_str() {
Some(INIT_SCRIPT) | Some(INIT_SERVER_SCRIPT) | Some(INIT_CLIENT_SCRIPT) => {
path_to_snapshot.pop();
},
_ => {},
}
trace!("Snapshotting path {}", path_to_snapshot.display());
let instances_at_path = self.instances_per_path.get(&path_to_snapshot)
.expect("Metadata did not exist for path")
.clone();
let context = SnapshotContext {
plugin_context: None,
};
for instance_id in &instances_at_path {
let instance_metadata = self.metadata_per_instance.get(&instance_id)
.expect("Metadata for instance ID did not exist");
let maybe_snapshot = match &instance_metadata.project_definition {
Some((instance_name, project_node)) => {
snapshot_project_node(&context, &imfs, &project_node, Cow::Owned(instance_name.clone()))
// .unwrap_or_else(|_| panic!("Could not generate instance snapshot for path {}", path_to_snapshot.display()))
},
None => {
snapshot_imfs_path(&context, &imfs, &path_to_snapshot, None)
// .unwrap_or_else(|_| panic!("Could not generate instance snapshot for path {}", path_to_snapshot.display()))
},
};
let snapshot = match maybe_snapshot {
Ok(Some(snapshot)) => snapshot,
Ok(None) => {
trace!("Path resulted in no snapshot being generated.");
return;
},
Err(err) => {
error!("Rojo couldn't turn one of the project's files into Roblox instances.");
error!("Any changes to the file have been ignored.");
error!("{}", err);
return;
},
};
trace!("Snapshot: {:#?}", snapshot);
reconcile_subtree(
&mut self.tree,
*instance_id,
&snapshot,
&mut self.instances_per_path,
&mut self.metadata_per_instance,
&mut changes,
);
}
}
if changes.is_empty() {
trace!("No instance changes triggered from file update.");
} else {
trace!("Pushing changes: {}", changes);
self.message_queue.push_messages(&[changes]);
}
}
pub fn path_created(&mut self, path: &Path) {
info!("Path created: {}", path.display());
self.path_created_or_updated(path);
}
pub fn path_updated(&mut self, path: &Path) {
info!("Path updated: {}", path.display());
{
let imfs = self.imfs.lock().unwrap();
// If the path doesn't exist or is a directory, we don't care if it
// updated
match imfs.get(path) {
Some(ImfsItem::Directory(_)) => {
trace!("Updated path was a directory, ignoring.");
return;
},
None => {
trace!("Updated path did not exist in IMFS, ignoring.");
return;
},
Some(ImfsItem::File(_)) => {},
}
}
self.path_created_or_updated(path);
}
pub fn path_removed(&mut self, path: &Path) {
info!("Path removed: {}", path.display());
self.instances_per_path.remove(path);
self.path_created_or_updated(path);
}
pub fn path_renamed(&mut self, from_path: &Path, to_path: &Path) {
info!("Path renamed from {} to {}", from_path.display(), to_path.display());
self.instances_per_path.remove(from_path);
self.path_created_or_updated(from_path);
self.path_created_or_updated(to_path);
}
pub fn get_tree(&self) -> &RbxTree {
&self.tree
}
pub fn get_all_instance_metadata(&self) -> &HashMap<RbxId, MetadataPerInstance> {
&self.metadata_per_instance
}
pub fn get_instance_metadata(&self, id: RbxId) -> Option<&MetadataPerInstance> {
self.metadata_per_instance.get(&id)
}
}
pub fn construct_oneoff_tree(project: &Project, imfs: &Imfs) -> Result<RbxTree, SnapshotError> {
let mut instances_per_path = PathMap::new();
let mut metadata_per_instance = HashMap::new();
let context = SnapshotContext {
plugin_context: None,
};
reify_initial_tree(project, &context, imfs, &mut instances_per_path, &mut metadata_per_instance)
}
fn reify_initial_tree(
project: &Project,
context: &SnapshotContext,
imfs: &Imfs,
instances_per_path: &mut PathMap<HashSet<RbxId>>,
metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
) -> Result<RbxTree, SnapshotError> {
let snapshot = match snapshot_project_tree(&context, imfs, project)? {
Some(snapshot) => snapshot,
None => panic!("Project did not produce any instances"),
};
let mut changes = InstanceChanges::default();
let tree = reify_root(&snapshot, instances_per_path, metadata_per_instance, &mut changes);
Ok(tree)
}

View File

@@ -1,896 +0,0 @@
//! Defines how Rojo transforms files into instances through the snapshot
//! system.
use std::{
borrow::Cow,
collections::HashMap,
fmt,
path::{Path, PathBuf},
str,
};
use rlua::Lua;
use failure::Fail;
use log::info;
use maplit::hashmap;
use rbx_dom_weak::{RbxTree, RbxValue, RbxInstanceProperties, UnresolvedRbxValue};
use serde::{Serialize, Deserialize};
use rbx_reflection::{try_resolve_value, ValueResolveError};
use crate::{
imfs::{
Imfs,
ImfsItem,
ImfsFile,
ImfsDirectory,
},
project::{
Project,
ProjectNode,
},
snapshot_reconciler::{
RbxSnapshotInstance,
snapshot_from_tree,
},
// TODO: Move MetadataPerInstance into this module?
rbx_session::MetadataPerInstance,
};
const INIT_MODULE_NAME: &str = "init.lua";
const INIT_SERVER_NAME: &str = "init.server.lua";
const INIT_CLIENT_NAME: &str = "init.client.lua";
pub struct SnapshotContext {
pub plugin_context: Option<SnapshotPluginContext>,
}
/// Context that's only relevant to generating snapshots if there are plugins
/// associated with the project.
///
/// It's possible that this needs some sort of extra nesting/filtering to
/// support nested projects, since their plugins should only apply to
/// themselves.
pub struct SnapshotPluginContext {
pub lua: Lua,
pub plugins: Vec<SnapshotPluginEntry>,
}
pub struct SnapshotPluginEntry {
/// Simple file name suffix filter to avoid running plugins on every file
/// change.
pub file_name_filter: String,
/// A key into the Lua registry created by [`create_registry_value`] that
/// refers to a function that can be called to transform a file/instance
/// pair according to how the plugin needs to operate.
///
/// [`create_registry_value`]: https://docs.rs/rlua/0.16.2/rlua/struct.Context.html#method.create_registry_value
pub callback: rlua::RegistryKey,
}
#[derive(Debug, Clone)]
struct LuaRbxSnapshot(RbxSnapshotInstance<'static>);
impl rlua::UserData for LuaRbxSnapshot {
fn add_methods<'lua, M: rlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
methods.add_meta_method(rlua::MetaMethod::Index, |_context, this, key: String| {
match key.as_str() {
"name" => Ok(this.0.name.clone().into_owned()),
"className" => Ok(this.0.class_name.clone().into_owned()),
_ => Err(rlua::Error::RuntimeError(format!("{} is not a valid member of RbxSnapshotInstance", &key))),
}
});
methods.add_meta_method(rlua::MetaMethod::ToString, |_context, _this, _args: ()| {
Ok("RbxSnapshotInstance")
});
}
}
pub type SnapshotResult<'a> = Result<Option<RbxSnapshotInstance<'a>>, SnapshotError>;
#[derive(Debug, Fail)]
pub enum SnapshotError {
DidNotExist(PathBuf),
Utf8Error {
#[fail(cause)]
inner: str::Utf8Error,
path: PathBuf,
},
JsonModelDecodeError {
#[fail(cause)]
inner: serde_json::Error,
path: PathBuf,
},
ExtraMetadataError {
#[fail(cause)]
inner: serde_json::Error,
path: PathBuf,
},
InvalidMetadataModelField {
field_name: String,
path: PathBuf,
},
MetadataClassNameNonInit {
path: PathBuf,
},
XmlModelDecodeError {
#[fail(cause)]
inner: rbx_xml::DecodeError,
path: PathBuf,
},
BinaryModelDecodeError {
inner: rbx_binary::DecodeError,
path: PathBuf,
},
CsvDecodeError {
#[fail(cause)]
inner: csv::Error,
path: PathBuf,
},
ProjectNodeUnusable,
ProjectNodeInvalidTransmute {
partition_path: PathBuf,
},
PropertyResolveError {
#[fail(cause)]
inner: ValueResolveError,
},
}
impl From<ValueResolveError> for SnapshotError {
fn from(inner: ValueResolveError) -> SnapshotError {
SnapshotError::PropertyResolveError {
inner,
}
}
}
impl fmt::Display for SnapshotError {
fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
match self {
SnapshotError::DidNotExist(path) => write!(output, "Path did not exist: {}", path.display()),
SnapshotError::Utf8Error { inner, path } => {
write!(output, "Invalid UTF-8: {} in path {}", inner, path.display())
},
SnapshotError::JsonModelDecodeError { inner, path } => {
write!(output, "Malformed .model.json model: {} in path {}", inner, path.display())
},
SnapshotError::ExtraMetadataError { inner, path } => {
write!(output, "Malformed init.meta.json: {} in path {}", inner, path.display())
},
SnapshotError::InvalidMetadataModelField { field_name, path } => {
writeln!(output, "The field '{}' cannot be specified on .meta.json files attached to models.", field_name)?;
writeln!(output, "Model path: {}", path.display())
},
SnapshotError::MetadataClassNameNonInit { path } => {
writeln!(output, "The field 'className' cannot be specified on .meta.json files besides init.meta.json")?;
writeln!(output, "Model path: {}", path.display())
},
SnapshotError::XmlModelDecodeError { inner, path } => {
write!(output, "Malformed rbxmx model: {} in path {}", inner, path.display())
},
SnapshotError::BinaryModelDecodeError { inner, path } => {
write!(output, "Malformed rbxm model: {:?} in path {}", inner, path.display())
},
SnapshotError::CsvDecodeError { inner, path } => {
write!(output, "Malformed csv file: {} in path {}", inner, path.display())
},
SnapshotError::ProjectNodeUnusable => {
write!(output, "Rojo project nodes must specify either $path or $className.")
},
SnapshotError::ProjectNodeInvalidTransmute { partition_path } => {
writeln!(output, "Rojo project nodes that specify both $path and $className require that the")?;
writeln!(output, "instance produced by the files pointed to by $path has a ClassName of")?;
writeln!(output, "Folder.")?;
writeln!(output, "")?;
writeln!(output, "Partition target ($path): {}", partition_path.display())
},
SnapshotError::PropertyResolveError { inner } => write!(output, "{}", inner),
}
}
}
pub fn snapshot_project_tree<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
project: &'source Project,
) -> SnapshotResult<'source> {
snapshot_project_node(context, imfs, &project.tree, Cow::Borrowed(&project.name))
}
pub fn snapshot_project_node<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
node: &ProjectNode,
instance_name: Cow<'source, str>,
) -> SnapshotResult<'source> {
let maybe_snapshot = match &node.path {
Some(path) => snapshot_imfs_path(context, imfs, &path, Some(instance_name.clone()))?,
None => match &node.class_name {
Some(_class_name) => Some(RbxSnapshotInstance {
name: instance_name.clone(),
// These properties are replaced later in the function to
// reduce code duplication.
class_name: Cow::Borrowed("Folder"),
properties: HashMap::new(),
children: Vec::new(),
metadata: MetadataPerInstance {
source_path: None,
ignore_unknown_instances: true,
project_definition: None,
},
}),
None => {
return Err(SnapshotError::ProjectNodeUnusable);
},
},
};
// If the snapshot resulted in no instances, like if it targets an unknown
// file or an empty model file, we can early-return.
//
// In the future, we might want to issue a warning if the project also
// specified fields like class_name, since the user will probably be
// confused as to why nothing showed up in the tree.
let mut snapshot = match maybe_snapshot {
Some(snapshot) => snapshot,
None => {
// TODO: Return some other sort of marker here instead? If a node
// transitions from None into Some, it's possible that configuration
// from the ProjectNode might be lost since there's nowhere to put
// it!
return Ok(None);
},
};
// Applies the class name specified in `class_name` from the project, if it's
// set.
if let Some(class_name) = &node.class_name {
// This can only happen if `path` was specified in the project node and
// that path represented a non-Folder instance.
if snapshot.class_name != "Folder" {
return Err(SnapshotError::ProjectNodeInvalidTransmute {
partition_path: node.path.as_ref().unwrap().to_owned(),
});
}
snapshot.class_name = Cow::Owned(class_name.to_owned());
}
for (child_name, child_project_node) in &node.children {
if let Some(child) = snapshot_project_node(context, imfs, child_project_node, Cow::Owned(child_name.clone()))? {
snapshot.children.push(child);
}
}
for (key, value) in &node.properties {
let resolved_value = try_resolve_value(&snapshot.class_name, key, value)?;
snapshot.properties.insert(key.clone(), resolved_value);
}
if let Some(ignore_unknown_instances) = node.ignore_unknown_instances {
snapshot.metadata.ignore_unknown_instances = ignore_unknown_instances;
}
snapshot.metadata.project_definition = Some((instance_name.into_owned(), node.clone()));
Ok(Some(snapshot))
}
pub fn snapshot_imfs_path<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
path: &Path,
instance_name: Option<Cow<'source, str>>,
) -> SnapshotResult<'source> {
// If the given path doesn't exist in the in-memory filesystem, we consider
// that an error.
match imfs.get(path) {
Some(imfs_item) => snapshot_imfs_item(context, imfs, imfs_item, instance_name),
None => return Err(SnapshotError::DidNotExist(path.to_owned())),
}
}
fn snapshot_imfs_item<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
item: &'source ImfsItem,
instance_name: Option<Cow<'source, str>>,
) -> SnapshotResult<'source> {
match item {
ImfsItem::File(file) => snapshot_imfs_file(context, imfs, file, instance_name),
ImfsItem::Directory(directory) => snapshot_imfs_directory(context, imfs, directory, instance_name),
}
}
fn snapshot_imfs_directory<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
directory: &'source ImfsDirectory,
instance_name: Option<Cow<'source, str>>,
) -> SnapshotResult<'source> {
let init_path = directory.path.join(INIT_MODULE_NAME);
let init_server_path = directory.path.join(INIT_SERVER_NAME);
let init_client_path = directory.path.join(INIT_CLIENT_NAME);
let snapshot_name = instance_name
.unwrap_or_else(|| {
Cow::Borrowed(directory.path
.file_name().expect("Could not extract file name")
.to_str().expect("Could not convert path to UTF-8"))
});
let mut snapshot = if directory.children.contains(&init_path) {
snapshot_imfs_path(context, imfs, &init_path, Some(snapshot_name))?.unwrap()
} else if directory.children.contains(&init_server_path) {
snapshot_imfs_path(context, imfs, &init_server_path, Some(snapshot_name))?.unwrap()
} else if directory.children.contains(&init_client_path) {
snapshot_imfs_path(context, imfs, &init_client_path, Some(snapshot_name))?.unwrap()
} else {
RbxSnapshotInstance {
class_name: Cow::Borrowed("Folder"),
name: snapshot_name,
properties: HashMap::new(),
children: Vec::new(),
metadata: MetadataPerInstance {
source_path: None,
ignore_unknown_instances: false,
project_definition: None,
},
}
};
if let Some(meta) = ExtraMetadata::locate(&imfs, &directory.path.join("init"))? {
meta.apply(&mut snapshot)?;
}
snapshot.metadata.source_path = Some(directory.path.to_owned());
for child_path in &directory.children {
let child_name = child_path
.file_name().expect("Couldn't extract file name")
.to_str().expect("Couldn't convert file name to UTF-8");
if child_name.ends_with(".meta.json") {
// meta.json files don't turn into instances themselves, they just
// modify other instances.
continue;
}
match child_name {
INIT_MODULE_NAME | INIT_SERVER_NAME | INIT_CLIENT_NAME => {
// The existence of files with these names modifies the
// parent instance and is handled above, so we can skip
// them here.
continue;
}
_ => {}
}
if let Some(child) = snapshot_imfs_path(context, imfs, child_path, None)? {
snapshot.children.push(child);
}
}
Ok(Some(snapshot))
}
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct ExtraMetadata {
class_name: Option<String>,
ignore_unknown_instances: Option<bool>,
#[serde(default = "HashMap::new")]
properties: HashMap<String, UnresolvedRbxValue>,
}
impl ExtraMetadata {
fn apply(self, snapshot: &mut RbxSnapshotInstance) -> Result<(), SnapshotError> {
if let Some(meta_class) = self.class_name {
snapshot.class_name = Cow::Owned(meta_class);
}
if let Some(meta_ignore_instances) = self.ignore_unknown_instances {
snapshot.metadata.ignore_unknown_instances = meta_ignore_instances;
}
for (key, value) in self.properties {
let resolved_value = try_resolve_value(&snapshot.class_name, &key, &value)?;
snapshot.properties.insert(key, resolved_value);
}
Ok(())
}
fn locate(imfs: &Imfs, path: &Path) -> Result<Option<ExtraMetadata>, SnapshotError> {
match imfs.get(&path.with_extension("meta.json")) {
Some(ImfsItem::File(file)) => {
let meta: ExtraMetadata = serde_json::from_slice(&file.contents)
.map_err(|inner| SnapshotError::ExtraMetadataError {
inner,
path: file.path.to_path_buf(),
})?;
Ok(Some(meta))
}
_ => Ok(None)
}
}
fn validate_for_non_init(&self, path: &Path) -> Result<(), SnapshotError> {
if self.class_name.is_some() {
return Err(SnapshotError::MetadataClassNameNonInit {
path: path.to_owned(),
});
}
Ok(())
}
fn validate_for_model(&self, path: &Path) -> Result<(), SnapshotError> {
if self.class_name.is_some() {
return Err(SnapshotError::InvalidMetadataModelField {
field_name: "className".to_owned(),
path: path.to_owned(),
});
}
if !self.properties.is_empty() {
return Err(SnapshotError::InvalidMetadataModelField {
field_name: "properties".to_owned(),
path: path.to_owned(),
});
}
Ok(())
}
}
fn snapshot_imfs_file<'source>(
context: &SnapshotContext,
imfs: &'source Imfs,
file: &'source ImfsFile,
instance_name: Option<Cow<'source, str>>,
) -> SnapshotResult<'source> {
let extension = file.path.extension()
.map(|v| v.to_str().expect("Could not convert extension to UTF-8"));
let mut maybe_snapshot = match extension {
Some("lua") => snapshot_lua_file(file, imfs)?,
Some("csv") => snapshot_csv_file(file, imfs)?,
Some("txt") => snapshot_txt_file(file, imfs)?,
Some("rbxmx") => snapshot_xml_model_file(file, imfs)?,
Some("rbxm") => snapshot_binary_model_file(file, imfs)?,
Some("json") => {
let file_stem = file.path
.file_stem().expect("Could not extract file stem")
.to_str().expect("Could not convert path to UTF-8");
if file_stem.ends_with(".model") {
snapshot_json_model_file(file)?
} else {
None
}
},
Some(_) | None => None,
};
if let Some(mut snapshot) = maybe_snapshot.as_mut() {
// Carefully preserve name from project manifest if present.
if let Some(snapshot_name) = instance_name {
snapshot.name = snapshot_name;
}
} else {
info!("File generated no snapshot: {}", file.path.display());
}
if let Some(snapshot) = maybe_snapshot.as_ref() {
if let Some(plugin_context) = &context.plugin_context {
for plugin in &plugin_context.plugins {
let owned_snapshot = snapshot.get_owned();
let registry_key = &plugin.callback;
plugin_context.lua.context(move |context| {
let callback: rlua::Function = context.registry_value(registry_key).unwrap();
callback.call::<_, ()>(LuaRbxSnapshot(owned_snapshot)).unwrap();
});
}
}
}
Ok(maybe_snapshot)
}
fn snapshot_lua_file<'source>(
file: &'source ImfsFile,
imfs: &'source Imfs,
) -> SnapshotResult<'source> {
let file_stem = file.path
.file_stem().expect("Could not extract file stem")
.to_str().expect("Could not convert path to UTF-8");
let (instance_name, class_name) = if let Some(name) = match_trailing(file_stem, ".server") {
(name, "Script")
} else if let Some(name) = match_trailing(file_stem, ".client") {
(name, "LocalScript")
} else {
(file_stem, "ModuleScript")
};
let contents = str::from_utf8(&file.contents)
.map_err(|inner| SnapshotError::Utf8Error {
inner,
path: file.path.to_path_buf(),
})?;
let mut snapshot = RbxSnapshotInstance {
name: Cow::Borrowed(instance_name),
class_name: Cow::Borrowed(class_name),
properties: hashmap! {
"Source".to_owned() => RbxValue::String {
value: contents.to_owned(),
},
},
children: Vec::new(),
metadata: MetadataPerInstance {
source_path: Some(file.path.to_path_buf()),
ignore_unknown_instances: false,
project_definition: None,
},
};
if let Some(meta) = ExtraMetadata::locate(&imfs, &file.path.with_file_name(instance_name))? {
meta.validate_for_non_init(&file.path)?;
meta.apply(&mut snapshot)?;
}
Ok(Some(snapshot))
}
fn match_trailing<'a>(input: &'a str, trailer: &str) -> Option<&'a str> {
if input.ends_with(trailer) {
let end = input.len().saturating_sub(trailer.len());
Some(&input[..end])
} else {
None
}
}
fn snapshot_txt_file<'source>(
file: &'source ImfsFile,
imfs: &'source Imfs,
) -> SnapshotResult<'source> {
let instance_name = file.path
.file_stem().expect("Could not extract file stem")
.to_str().expect("Could not convert path to UTF-8");
let contents = str::from_utf8(&file.contents)
.map_err(|inner| SnapshotError::Utf8Error {
inner,
path: file.path.to_path_buf(),
})?;
let mut snapshot = RbxSnapshotInstance {
name: Cow::Borrowed(instance_name),
class_name: Cow::Borrowed("StringValue"),
properties: hashmap! {
"Value".to_owned() => RbxValue::String {
value: contents.to_owned(),
},
},
children: Vec::new(),
metadata: MetadataPerInstance {
source_path: Some(file.path.to_path_buf()),
ignore_unknown_instances: false,
project_definition: None,
},
};
if let Some(meta) = ExtraMetadata::locate(&imfs, &file.path)? {
meta.validate_for_non_init(&file.path)?;
meta.apply(&mut snapshot)?;
}
Ok(Some(snapshot))
}
fn snapshot_csv_file<'source>(
file: &'source ImfsFile,
imfs: &'source Imfs,
) -> SnapshotResult<'source> {
/// Struct that holds any valid row from a Roblox CSV translation table.
///
/// We manually deserialize into this table from CSV, but let JSON handle
/// serializing.
#[derive(Debug, Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct LocalizationEntry<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
key: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
context: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
example: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
source: Option<&'a str>,
values: HashMap<&'a str, &'a str>,
}
let instance_name = file.path
.file_stem().expect("Could not extract file stem")
.to_str().expect("Could not convert path to UTF-8");
// Normally, we'd be able to let the csv crate construct our struct for us.
//
// However, because of a limitation with Serde's 'flatten' feature, it's not
// possible presently to losslessly collect extra string values while using
// csv+Serde.
//
// https://github.com/BurntSushi/rust-csv/issues/151
let mut reader = csv::Reader::from_reader(file.contents.as_slice());
let headers = reader.headers()
.map_err(|inner| SnapshotError::CsvDecodeError {
inner,
path: file.path.to_path_buf(),
})?
.clone();
let mut records = Vec::new();
for record in reader.into_records() {
let record = record
.map_err(|inner| SnapshotError::CsvDecodeError {
inner,
path: file.path.to_path_buf(),
})?;
records.push(record);
}
let mut entries = Vec::new();
for record in &records {
let mut entry = LocalizationEntry::default();
for (header, value) in headers.iter().zip(record.into_iter()) {
if header.is_empty() || value.is_empty() {
continue;
}
match header {
"Key" => entry.key = Some(value),
"Source" => entry.source = Some(value),
"Context" => entry.context = Some(value),
"Example" => entry.example = Some(value),
_ => {
entry.values.insert(header, value);
}
}
}
if entry.key.is_none() && entry.source.is_none() {
continue;
}
entries.push(entry);
}
let table_contents = serde_json::to_string(&entries)
.expect("Could not encode JSON for localization table");
let mut snapshot = RbxSnapshotInstance {
name: Cow::Borrowed(instance_name),
class_name: Cow::Borrowed("LocalizationTable"),
properties: hashmap! {
"Contents".to_owned() => RbxValue::String {
value: table_contents,
},
},
children: Vec::new(),
metadata: MetadataPerInstance {
source_path: Some(file.path.to_path_buf()),
ignore_unknown_instances: false,
project_definition: None,
},
};
if let Some(meta) = ExtraMetadata::locate(&imfs, &file.path)? {
meta.validate_for_non_init(&file.path)?;
meta.apply(&mut snapshot)?;
}
Ok(Some(snapshot))
}
/// Builds a snapshot from a `.model.json` file describing an instance tree.
///
/// The top-level instance's name comes from the file name (minus the
/// `.model.json` suffix); a 'Name' field inside the JSON is ignored with a
/// warning when it disagrees.
fn snapshot_json_model_file<'source>(
    file: &'source ImfsFile,
) -> SnapshotResult<'source> {
    let name_with_suffix = file.path
        .file_name().expect("Could not extract file stem")
        .to_str().expect("Could not convert path to UTF-8");
    let instance_name = match_trailing(name_with_suffix, ".model.json")
        .expect("JSON model file did not end in .model.json");

    let source_text = str::from_utf8(&file.contents)
        .map_err(|inner| SnapshotError::Utf8Error {
            inner,
            path: file.path.to_owned(),
        })?;

    let model: JsonModel = serde_json::from_str(source_text)
        .map_err(|inner| SnapshotError::JsonModelDecodeError {
            inner,
            path: file.path.to_owned(),
        })?;

    // Warn when the deprecated top-level 'Name' field disagrees with the
    // name derived from the file itself.
    match &model.name {
        Some(json_name) if json_name != instance_name => {
            log::warn!("Name from JSON model did not match its file name: {}", file.path.display());
            log::warn!("In Rojo < alpha 14, this model is named \"{}\" (from its 'Name' property)", json_name);
            log::warn!("In Rojo >= alpha 14, this model is named \"{}\" (from its file name)", instance_name);
            log::warn!("'Name' for the top-level instance in a JSON model is now optional and will be ignored.");
        }
        _ => {}
    }

    let mut snapshot = model.core.into_snapshot(instance_name.to_owned())?;
    snapshot.name = Cow::Borrowed(instance_name);
    snapshot.metadata.source_path = Some(file.path.to_owned());

    Ok(Some(snapshot))
}
/// Top-level shape of a `.model.json` file.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModel {
    /// Deprecated: the root instance's name now comes from the file name,
    /// and this field is only used to warn when the two disagree.
    name: Option<String>,
    #[serde(flatten)]
    core: JsonModelCore,
}
/// A non-root instance inside a `.model.json` file. Unlike the root
/// (`JsonModel`), child instances must carry their own name.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModelInstance {
    name: String,
    #[serde(flatten)]
    core: JsonModelCore,
}
/// The name-independent parts of an instance description shared by both the
/// root and child entries of a JSON model.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModelCore {
    class_name: String,
    // Defaults keep both fields optional in the JSON while letting
    // serialization omit them when empty.
    #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")]
    children: Vec<JsonModelInstance>,
    #[serde(default = "HashMap::new", skip_serializing_if = "HashMap::is_empty")]
    properties: HashMap<String, UnresolvedRbxValue>,
}
impl JsonModelCore {
fn into_snapshot(self, name: String) -> Result<RbxSnapshotInstance<'static>, SnapshotError> {
let mut children = Vec::with_capacity(self.children.len());
for child in self.children {
children.push(child.core.into_snapshot(child.name)?);
}
let mut properties = HashMap::with_capacity(self.properties.len());
for (key, value) in self.properties {
let resolved_value = try_resolve_value(&self.class_name, &key, &value)?;
properties.insert(key, resolved_value);
}
Ok(RbxSnapshotInstance {
name: Cow::Owned(name),
class_name: Cow::Owned(self.class_name),
properties,
children,
metadata: Default::default(),
})
}
}
/// Builds a snapshot from an XML (`.rbxmx`-style) model file.
///
/// The model is decoded into a temporary tree; a model with exactly one root
/// becomes a snapshot named after the file stem, an empty model yields
/// `Ok(None)`, and multiple roots are currently unsupported.
fn snapshot_xml_model_file<'source>(
    file: &'source ImfsFile,
    imfs: &'source Imfs,
) -> SnapshotResult<'source> {
    let instance_name = file.path
        .file_stem().expect("Could not extract file stem")
        .to_str().expect("Could not convert path to UTF-8");

    // Read unknown properties rather than discarding them.
    let options = rbx_xml::DecodeOptions::new()
        .property_behavior(rbx_xml::DecodePropertyBehavior::ReadUnknown);

    let temp_tree = rbx_xml::from_reader(file.contents.as_slice(), options)
        .map_err(|inner| SnapshotError::XmlModelDecodeError {
            inner,
            path: file.path.clone(),
        })?;

    let root_instance = temp_tree.get_instance(temp_tree.get_root_id()).unwrap();

    match root_instance.get_children_ids() {
        [] => Ok(None),
        [only_child] => {
            let mut snapshot = snapshot_from_tree(&temp_tree, *only_child).unwrap();
            snapshot.name = Cow::Borrowed(instance_name);
            snapshot.metadata.source_path = Some(file.path.clone());

            if let Some(meta) = ExtraMetadata::locate(&imfs, &file.path)? {
                meta.validate_for_model(&file.path)?;
                meta.apply(&mut snapshot)?;
            }

            Ok(Some(snapshot))
        }
        _ => panic!("Rojo doesn't have support for model files with multiple roots yet"),
    }
}
/// Builds a snapshot from a binary (`.rbxm`-style) model file.
///
/// Mirrors `snapshot_xml_model_file`, but decodes with `rbx_binary` into a
/// throwaway tree rooted at a temporary Folder.
fn snapshot_binary_model_file<'source>(
    file: &'source ImfsFile,
    imfs: &'source Imfs,
) -> SnapshotResult<'source> {
    let instance_name = file.path
        .file_stem().expect("Could not extract file stem")
        .to_str().expect("Could not convert path to UTF-8");

    // Binary models decode into children of a temporary root instance.
    let mut temp_tree = RbxTree::new(RbxInstanceProperties {
        name: "Temp".to_owned(),
        class_name: "Folder".to_owned(),
        properties: HashMap::new(),
    });
    let root_id = temp_tree.get_root_id();

    rbx_binary::decode(&mut temp_tree, root_id, file.contents.as_slice())
        .map_err(|inner| SnapshotError::BinaryModelDecodeError {
            inner,
            path: file.path.clone(),
        })?;

    let decoded_children = temp_tree.get_instance(root_id).unwrap().get_children_ids();

    if decoded_children.is_empty() {
        return Ok(None);
    }

    if decoded_children.len() > 1 {
        panic!("Rojo doesn't have support for model files with multiple roots yet");
    }

    let mut snapshot = snapshot_from_tree(&temp_tree, decoded_children[0]).unwrap();
    snapshot.name = Cow::Borrowed(instance_name);
    snapshot.metadata.source_path = Some(file.path.clone());

    if let Some(meta) = ExtraMetadata::locate(&imfs, &file.path)? {
        meta.validate_for_model(&file.path)?;
        meta.apply(&mut snapshot)?;
    }

    Ok(Some(snapshot))
}

View File

@@ -0,0 +1,33 @@
use std::collections::HashSet;
use crate::{
project::Project,
session_id::SessionId,
};
/// Contains all of the state for a Rojo serve session.
pub struct ServeSession {
    // The project this session serves from, when one was provided.
    root_project: Option<Project>,
    // Identifier generated at construction (via SessionId::new) that
    // uniquely labels this session.
    session_id: SessionId,
}
impl ServeSession {
    /// Starts a new serve session, generating a fresh session ID.
    pub fn new(root_project: Option<Project>) -> ServeSession {
        ServeSession {
            root_project,
            session_id: SessionId::new(),
        }
    }

    /// The unique identifier generated for this session.
    pub fn session_id(&self) -> SessionId {
        self.session_id
    }

    /// The set of place IDs the root project restricts serving to, if the
    /// session has a root project that defines one.
    pub fn serve_place_ids(&self) -> Option<&HashSet<u64>> {
        match &self.root_project {
            Some(project) => project.serve_place_ids.as_ref(),
            None => None,
        }
    }
}

View File

@@ -0,0 +1,62 @@
//! Defines the structure of an instance snapshot.
use std::{
borrow::Cow,
collections::HashMap,
};
use rbx_dom_weak::{RbxTree, RbxId, RbxValue};
/// A lightweight description of what an instance should look like. Attempts to
/// be somewhat memory efficient by borrowing from its source data, indicated by
/// the lifetime parameter, `'source`.
///
// Possible future improvements:
// - Use refcounted/interned strings
// - Replace use of RbxValue with a sum of RbxValue + borrowed value
#[derive(Debug, Clone, PartialEq)]
pub struct InstanceSnapshot<'source> {
    /// The ID of the instance this snapshot was created from, if any. Used
    /// when computing patches to correlate snapshots with tree instances.
    pub snapshot_id: Option<RbxId>,
    /// The name the instance should have.
    pub name: Cow<'source, str>,
    /// The ClassName the instance should have.
    pub class_name: Cow<'source, str>,
    /// The full set of properties the instance should have.
    pub properties: HashMap<String, RbxValue>,
    /// Snapshots for each of the instance's children, in order.
    pub children: Vec<InstanceSnapshot<'source>>,
    // TODO: Snapshot source, like a file or a project node?
}
impl<'source> InstanceSnapshot<'source> {
    /// Produces a deep copy of this snapshot that owns all of its data,
    /// recursively. The copies drop their snapshot IDs.
    pub fn get_owned(&'source self) -> InstanceSnapshot<'static> {
        InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Owned(self.name.clone().into_owned()),
            class_name: Cow::Owned(self.class_name.clone().into_owned()),
            properties: self.properties.clone(),
            children: self.children
                .iter()
                .map(InstanceSnapshot::get_owned)
                .collect(),
        }
    }

    /// Describes the instance `id` (and its entire subtree) in `tree` as a
    /// snapshot, recording each instance's ID as its snapshot ID.
    pub fn from_tree(tree: &RbxTree, id: RbxId) -> InstanceSnapshot<'static> {
        let instance = tree.get_instance(id)
            .expect("instance did not exist in tree");

        let child_ids = instance.get_children_ids();
        let mut children = Vec::with_capacity(child_ids.len());
        for &child_id in child_ids {
            children.push(InstanceSnapshot::from_tree(tree, child_id));
        }

        InstanceSnapshot {
            snapshot_id: Some(id),
            name: Cow::Owned(instance.name.clone()),
            class_name: Cow::Owned(instance.class_name.clone()),
            properties: instance.properties.clone(),
            children,
        }
    }
}

View File

@@ -0,0 +1,29 @@
//! This module defines the instance snapshot subsystem of Rojo.
//!
//! It defines a way to define the instance tree of a project as a pure function
//! of the filesystem by providing a lightweight instance 'snapshot' type, a
//! method to generate minimal patches, and a method that applies those patches.
//!
//! The aim with this approach is to reduce the number of bugs that arise from
//! attempting to manually update instances in response to filesystem updates.
//! Instead of surgically identifying what needs to change, we can do rough
//! "damage-painting", running our relatively fast snapshot function over
//! anything that could have changed and running it through a diffing function
//! to minimize the set of real changes.
//!
//! Building out a snapshot reconciler is mostly overkill for scripts, since
//! their relationships are mostly simple and well-defined. It becomes very
//! important, however, when dealing with large opaque model files and
//! user-defined plugins.
#![allow(dead_code)]
mod patch;
mod patch_apply;
mod patch_compute;
mod instance_snapshot;
pub use instance_snapshot::InstanceSnapshot;
pub use patch_apply::apply_patch_set;
pub use patch_compute::compute_patch_set;
pub use patch::*;

View File

@@ -0,0 +1,44 @@
//! Defines the data structures used for describing instance patches.
use std::collections::HashMap;
use rbx_dom_weak::{RbxValue, RbxId};
use super::InstanceSnapshot;
/// A set of different kinds of patches that can be applied to an RbxTree.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct PatchSet<'a> {
    /// IDs of instances that should be removed from the tree.
    pub removed_instances: Vec<RbxId>,
    /// Instances that should be created and parented into the tree.
    pub added_instances: Vec<PatchAddInstance<'a>>,
    /// In-place changes to instances already present in the tree.
    pub updated_instances: Vec<PatchUpdateInstance>,
}
impl<'a> PatchSet<'a> {
pub fn new() -> PatchSet<'a> {
PatchSet {
removed_instances: Vec::new(),
added_instances: Vec::new(),
updated_instances: Vec::new(),
}
}
}
/// A patch containing an instance that was added to the tree.
#[derive(Debug, Clone, PartialEq)]
pub struct PatchAddInstance<'a> {
    /// The ID of the existing instance the new subtree is parented under.
    pub parent_id: RbxId,
    /// The full description of the instance (and children) to create.
    pub instance: InstanceSnapshot<'a>,
}
/// A patch indicating that properties (or the name) of an instance changed.
#[derive(Debug, Clone, PartialEq)]
pub struct PatchUpdateInstance {
    /// The ID of the instance being updated.
    pub id: RbxId,
    /// The instance's new name, if it changed.
    pub changed_name: Option<String>,
    /// The instance's new class name, if it changed.
    pub changed_class_name: Option<String>,
    /// Contains all changed properties. If a property is assigned to `None`,
    /// then that property has been removed.
    pub changed_properties: HashMap<String, Option<RbxValue>>,
}

View File

@@ -0,0 +1,239 @@
//! Defines the algorithm for applying generated patches.
use std::collections::HashMap;
use rbx_dom_weak::{RbxTree, RbxValue, RbxId, RbxInstanceProperties};
use super::{
patch::{PatchSet, PatchUpdateInstance},
InstanceSnapshot,
};
/// Applies every change described by `patch_set` to `tree`.
///
/// Removals happen first, then additions, then updates. Property values that
/// need Ref rewriting are deferred until all instances in the patch exist.
pub fn apply_patch_set(
    tree: &mut RbxTree,
    patch_set: &PatchSet,
) {
    let mut context = PatchApplyContext::default();

    patch_set.removed_instances
        .iter()
        .for_each(|&removed_id| {
            tree.remove_instance(removed_id);
        });

    for added in &patch_set.added_instances {
        apply_add_child(&mut context, tree, added.parent_id, &added.instance);
    }

    for updated in &patch_set.updated_instances {
        apply_update_child(&context, tree, updated);
    }

    apply_deferred_properties(context, tree);
}
#[derive(Default)]
struct PatchApplyContext {
    /// Maps snapshot IDs onto the IDs of the instances created for them,
    /// filled in as instances are added during patch application.
    snapshot_id_to_instance_id: HashMap<RbxId, RbxId>,
    /// Property maps waiting to be assigned once every instance in the
    /// patch exists, keyed by the ID of the created instance.
    properties_to_apply: HashMap<RbxId, HashMap<String, RbxValue>>,
}
/// Apply properties that were deferred in order to get more information.
///
/// Ref properties from snapshots refer to eachother via snapshot ID. Some of
/// these properties are transformed when the patch is computed, notably the
/// instances that the patch computing method is able to pair up.
///
/// The remaining Ref properties need to be handled during patch application,
/// where we build up a map of snapshot IDs to instance IDs as they're created,
/// then apply properties all at once at the end.
fn apply_deferred_properties(context: PatchApplyContext, tree: &mut RbxTree) {
for (id, mut properties) in context.properties_to_apply {
let instance = tree.get_instance_mut(id)
.expect("Invalid instance ID in deferred property map");
for property_value in properties.values_mut() {
if let RbxValue::Ref { value: Some(id) } = property_value {
if let Some(&instance_id) = context.snapshot_id_to_instance_id.get(id) {
*property_value = RbxValue::Ref { value: Some(instance_id) };
}
}
}
instance.properties = properties;
}
}
/// Creates the instance described by `snapshot` under `parent_id`,
/// recursively creating its children.
///
/// The instance is inserted with an empty property map; its real properties
/// are stashed in the context and assigned later, once Refs can be resolved
/// against the complete snapshot-ID map.
fn apply_add_child(
    context: &mut PatchApplyContext,
    tree: &mut RbxTree,
    parent_id: RbxId,
    snapshot: &InstanceSnapshot,
) {
    let id = tree.insert_instance(
        RbxInstanceProperties {
            name: snapshot.name.clone().into_owned(),
            class_name: snapshot.class_name.clone().into_owned(),
            // Property assignment is deferred until after we know about all
            // instances in this patch.
            properties: HashMap::new(),
        },
        parent_id,
    );

    context.properties_to_apply.insert(id, snapshot.properties.clone());

    if let Some(snapshot_id) = snapshot.snapshot_id {
        context.snapshot_id_to_instance_id.insert(snapshot_id, id);
    }

    for child in &snapshot.children {
        apply_add_child(context, tree, id, child);
    }
}
/// Applies one update patch: renames, reclassifies, and edits the property
/// map of an existing instance.
fn apply_update_child(
    context: &PatchApplyContext,
    tree: &mut RbxTree,
    patch: &PatchUpdateInstance,
) {
    let instance = tree.get_instance_mut(patch.id)
        .expect("Instance referred to by patch does not exist");

    if let Some(new_name) = &patch.changed_name {
        instance.name = new_name.clone();
    }

    if let Some(new_class_name) = &patch.changed_class_name {
        instance.class_name = new_class_name.clone();
    }

    for (key, entry) in &patch.changed_properties {
        match entry {
            // `None` marks a property that has been removed outright.
            None => {
                instance.properties.remove(key);
            }
            // Refs that point at instances created by this patch still carry
            // snapshot IDs and need to be translated to instance IDs.
            Some(RbxValue::Ref { value: Some(ref_id) }) => {
                let target = match context.snapshot_id_to_instance_id.get(ref_id) {
                    Some(&instance_id) => instance_id,
                    None => *ref_id,
                };
                instance.properties.insert(key.clone(), RbxValue::Ref {
                    value: Some(target),
                });
            }
            Some(other) => {
                instance.properties.insert(key.clone(), other.clone());
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use std::{
        borrow::Cow,
        collections::HashMap,
    };
    use maplit::hashmap;
    use rbx_dom_weak::RbxValue;
    use super::super::patch::PatchAddInstance;
    /// Applying an add patch to an empty tree should create the described
    /// instance, including its properties, under the requested parent.
    #[test]
    fn add_from_empty() {
        let _ = env_logger::try_init();
        let mut tree = RbxTree::new(RbxInstanceProperties {
            name: "Folder".to_owned(),
            class_name: "Folder".to_owned(),
            properties: HashMap::new(),
        });
        let root_id = tree.get_root_id();
        let snapshot = InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Borrowed("Foo"),
            class_name: Cow::Borrowed("Bar"),
            properties: hashmap! {
                "Baz".to_owned() => RbxValue::Int32 { value: 5 },
            },
            children: Vec::new(),
        };
        let patch_set = PatchSet {
            added_instances: vec![
                PatchAddInstance {
                    parent_id: root_id,
                    instance: snapshot.clone(),
                }
            ],
            ..Default::default()
        };
        apply_patch_set(&mut tree, &patch_set);
        // The new instance should be the root's only child and should match
        // the snapshot exactly.
        let root_instance = tree.get_instance(root_id).unwrap();
        let child_id = root_instance.get_children_ids()[0];
        let child_instance = tree.get_instance(child_id).unwrap();
        assert_eq!(child_instance.name.as_str(), &snapshot.name);
        assert_eq!(child_instance.class_name.as_str(), &snapshot.class_name);
        assert_eq!(&child_instance.properties, &snapshot.properties);
        assert!(child_instance.get_children_ids().is_empty());
    }
    /// An update patch should rename/reclass the instance and apply property
    /// changes, removals, and additions, leaving other properties alone.
    #[test]
    fn update_existing() {
        let _ = env_logger::try_init();
        let mut tree = RbxTree::new(RbxInstanceProperties {
            name: "OldName".to_owned(),
            class_name: "OldClassName".to_owned(),
            properties: hashmap! {
                "Foo".to_owned() => RbxValue::Int32 { value: 7 },
                "Bar".to_owned() => RbxValue::Int32 { value: 3 },
                "Unchanged".to_owned() => RbxValue::Int32 { value: -5 },
            },
        });
        let root_id = tree.get_root_id();
        let patch = PatchUpdateInstance {
            id: root_id,
            changed_name: Some("Foo".to_owned()),
            changed_class_name: Some("NewClassName".to_owned()),
            changed_properties: hashmap! {
                // The value of Foo has changed
                "Foo".to_owned() => Some(RbxValue::Int32 { value: 8 }),
                // Bar has been deleted
                "Bar".to_owned() => None,
                // Baz has been added
                "Baz".to_owned() => Some(RbxValue::Int32 { value: 10 }),
            },
        };
        let patch_set = PatchSet {
            updated_instances: vec![patch],
            ..Default::default()
        };
        apply_patch_set(&mut tree, &patch_set);
        let expected_properties = hashmap! {
            "Foo".to_owned() => RbxValue::Int32 { value: 8 },
            "Baz".to_owned() => RbxValue::Int32 { value: 10 },
            "Unchanged".to_owned() => RbxValue::Int32 { value: -5 },
        };
        let root_instance = tree.get_instance(root_id).unwrap();
        assert_eq!(root_instance.name, "Foo");
        assert_eq!(root_instance.class_name, "NewClassName");
        assert_eq!(root_instance.properties, expected_properties);
    }
}

View File

@@ -0,0 +1,322 @@
//! Defines the algorithm for computing a roughly-minimal patch set given an
//! existing instance tree and an instance snapshot.
use std::collections::{HashMap, HashSet};
use rbx_dom_weak::{RbxTree, RbxValue, RbxId, RbxInstance};
use super::{
InstanceSnapshot,
patch::{PatchSet, PatchAddInstance, PatchUpdateInstance},
};
/// Computes a roughly-minimal patch set that would transform the instance
/// `id` in `tree` (and its subtree) into the shape described by `snapshot`.
pub fn compute_patch_set<'a>(
    snapshot: &'a InstanceSnapshot,
    tree: &RbxTree,
    id: RbxId,
) -> PatchSet<'a> {
    let mut id_map_context = ComputePatchContext::default();
    let mut patch_set = PatchSet::new();

    compute_patch_set_internal(&mut id_map_context, snapshot, tree, id, &mut patch_set);

    // Now that every snapshot/instance pairing discovered during the walk is
    // recorded, rewrite Ref properties to use instance IDs where possible.
    rewrite_refs_in_updates(&id_map_context, &mut patch_set.updated_instances);
    rewrite_refs_in_additions(&id_map_context, &mut patch_set.added_instances);

    patch_set
}
#[derive(Default)]
struct ComputePatchContext {
    /// Records which tree instance each snapshot was paired with, so Ref
    /// properties using snapshot IDs can be rewritten after the walk.
    snapshot_id_to_instance_id: HashMap<RbxId, RbxId>,
}
/// Rewrites Ref property values inside update patches from snapshot IDs to
/// instance IDs, for every pairing the context knows about.
fn rewrite_refs_in_updates(context: &ComputePatchContext, updates: &mut [PatchUpdateInstance]) {
    for update in updates.iter_mut() {
        for slot in update.changed_properties.values_mut() {
            let rewritten = match slot {
                Some(RbxValue::Ref { value: Some(snapshot_id) }) => {
                    context.snapshot_id_to_instance_id
                        .get(snapshot_id)
                        .map(|&instance_id| Some(RbxValue::Ref { value: Some(instance_id) }))
                }
                _ => None,
            };

            if let Some(new_value) = rewritten {
                *slot = new_value;
            }
        }
    }
}
/// Rewrites Ref properties inside each added subtree; see
/// `rewrite_refs_in_snapshot`.
fn rewrite_refs_in_additions(context: &ComputePatchContext, additions: &mut [PatchAddInstance]) {
    additions
        .iter_mut()
        .for_each(|addition| rewrite_refs_in_snapshot(context, &mut addition.instance));
}
/// Recursively rewrites Ref properties in a snapshot (and its children) from
/// snapshot IDs to the instance IDs recorded in the context.
fn rewrite_refs_in_snapshot(context: &ComputePatchContext, snapshot: &mut InstanceSnapshot) {
    for slot in snapshot.properties.values_mut() {
        let mapped = if let RbxValue::Ref { value: Some(snapshot_id) } = slot {
            context.snapshot_id_to_instance_id.get(snapshot_id).cloned()
        } else {
            None
        };

        if let Some(instance_id) = mapped {
            *slot = RbxValue::Ref { value: Some(instance_id) };
        }
    }

    for child in snapshot.children.iter_mut() {
        rewrite_refs_in_snapshot(context, child);
    }
}
/// Recursive worker for `compute_patch_set`: diffs one snapshot/instance
/// pair, then their children.
fn compute_patch_set_internal<'a>(
    context: &mut ComputePatchContext,
    snapshot: &'a InstanceSnapshot,
    tree: &RbxTree,
    id: RbxId,
    patch_set: &mut PatchSet<'a>,
) {
    // Record the pairing so Ref properties that use snapshot IDs can be
    // rewritten to instance IDs after the walk.
    if let Some(snapshot_id) = snapshot.snapshot_id {
        context.snapshot_id_to_instance_id.insert(snapshot_id, id);
    }
    let instance = tree.get_instance(id)
        .expect("Instance did not exist in tree");
    compute_property_patches(snapshot, instance, patch_set);
    compute_children_patches(context, snapshot, tree, id, patch_set);
}
/// Compares `snapshot` against `instance` and, when the name, class name, or
/// any property differs, pushes an update patch onto `patch_set`.
///
/// In the resulting patch, a changed or added property maps to
/// `Some(new_value)`; a property present on the instance but absent from the
/// snapshot maps to `None`, meaning removal.
fn compute_property_patches(
    snapshot: &InstanceSnapshot,
    instance: &RbxInstance,
    patch_set: &mut PatchSet,
) {
    let mut visited_properties = HashSet::new();
    let mut changed_properties = HashMap::new();

    let changed_name = if snapshot.name == instance.name {
        None
    } else {
        Some(snapshot.name.clone().into_owned())
    };

    let changed_class_name = if snapshot.class_name == instance.class_name {
        None
    } else {
        Some(snapshot.class_name.clone().into_owned())
    };

    // Properties present in the snapshot: record changed and added values.
    for (name, snapshot_value) in &snapshot.properties {
        visited_properties.insert(name.as_str());
        match instance.properties.get(name) {
            Some(instance_value) => {
                if snapshot_value != instance_value {
                    changed_properties.insert(name.clone(), Some(snapshot_value.clone()));
                }
            }
            None => {
                changed_properties.insert(name.clone(), Some(snapshot_value.clone()));
            }
        }
    }

    // Properties only present on the instance have been removed.
    for name in instance.properties.keys() {
        if visited_properties.contains(name.as_str()) {
            continue;
        }
        changed_properties.insert(name.clone(), None);
    }

    // Fix: the early-return previously only checked the property map and the
    // name, so a patch where *only* the class name changed was dropped and
    // class changes never reached the tree.
    if changed_properties.is_empty() && changed_name.is_none() && changed_class_name.is_none() {
        return;
    }

    patch_set.updated_instances.push(PatchUpdateInstance {
        id: instance.get_id(),
        changed_name,
        changed_class_name,
        changed_properties,
    });
}
/// Pairs the snapshot's children with the instance's children, recursing into
/// matched pairs, emitting add patches for unmatched snapshot children and
/// removals for unmatched instance children.
fn compute_children_patches<'a>(
    context: &mut ComputePatchContext,
    snapshot: &'a InstanceSnapshot,
    tree: &RbxTree,
    id: RbxId,
    patch_set: &mut PatchSet<'a>,
) {
    let instance = tree.get_instance(id)
        .expect("Instance did not exist in tree");

    let instance_children = instance.get_children_ids();

    // Tracks which instance children have been claimed so each instance is
    // paired with at most one snapshot child.
    let mut paired_instances = vec![false; instance_children.len()];

    for snapshot_child in snapshot.children.iter() {
        // Children are paired by name only. (The original condition also
        // compared `instance_child.class_name` with itself, which was always
        // true — dead code removed. Class differences on paired children
        // surface as `changed_class_name` via compute_property_patches.)
        let matching_instance = instance_children
            .iter()
            .enumerate()
            .find(|(instance_index, instance_child_id)| {
                if paired_instances[*instance_index] {
                    return false;
                }

                let instance_child = tree.get_instance(**instance_child_id)
                    .expect("Instance did not exist in tree");

                if snapshot_child.name == instance_child.name {
                    paired_instances[*instance_index] = true;
                    return true;
                }

                false
            });

        match matching_instance {
            Some((_, instance_child_id)) => {
                compute_patch_set_internal(context, snapshot_child, tree, *instance_child_id, patch_set);
            }
            None => {
                patch_set.added_instances.push(PatchAddInstance {
                    parent_id: id,
                    instance: snapshot_child.clone(),
                });
            }
        }
    }

    // Instance children that no snapshot child claimed no longer exist in
    // the snapshot and should be removed.
    for (instance_index, instance_child_id) in instance_children.iter().enumerate() {
        if paired_instances[instance_index] {
            continue;
        }

        patch_set.removed_instances.push(*instance_child_id);
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use std::borrow::Cow;
    use maplit::hashmap;
    use rbx_dom_weak::RbxInstanceProperties;
    /// This test makes sure that rewriting refs in instance update patches to
    /// instances that already exists works. We should be able to correlate the
    /// snapshot ID and instance ID during patch computation and replace the
    /// value before returning from compute_patch_set.
    #[test]
    fn rewrite_ref_existing_instance_update() {
        let tree = RbxTree::new(RbxInstanceProperties {
            name: "foo".to_owned(),
            class_name: "foo".to_owned(),
            properties: HashMap::new(),
        });
        let root_id = tree.get_root_id();
        // This snapshot should be identical to the existing tree except for the
        // addition of a prop named Self, which is a self-referential Ref.
        let snapshot_id = RbxId::new();
        let snapshot = InstanceSnapshot {
            snapshot_id: Some(snapshot_id),
            properties: hashmap! {
                "Self".to_owned() => RbxValue::Ref {
                    value: Some(snapshot_id),
                }
            },
            name: Cow::Borrowed("foo"),
            class_name: Cow::Borrowed("foo"),
            children: Vec::new(),
        };
        let patch_set = compute_patch_set(&snapshot, &tree, root_id);
        // The Ref should now point at the root instance's real ID.
        let expected_patch_set = PatchSet {
            updated_instances: vec![
                PatchUpdateInstance {
                    id: root_id,
                    changed_name: None,
                    changed_class_name: None,
                    changed_properties: hashmap! {
                        "Self".to_owned() => Some(RbxValue::Ref {
                            value: Some(root_id),
                        }),
                    },
                },
            ],
            added_instances: Vec::new(),
            removed_instances: Vec::new(),
        };
        assert_eq!(patch_set, expected_patch_set);
    }
    /// The same as rewrite_ref_existing_instance_update, except that the
    /// property is added in a new instance instead of modifying an existing
    /// one.
    #[test]
    fn rewrite_ref_existing_instance_addition() {
        let tree = RbxTree::new(RbxInstanceProperties {
            name: "foo".to_owned(),
            class_name: "foo".to_owned(),
            properties: HashMap::new(),
        });
        let root_id = tree.get_root_id();
        // This patch describes the existing instance with a new child added.
        let snapshot_id = RbxId::new();
        let snapshot = InstanceSnapshot {
            snapshot_id: Some(snapshot_id),
            children: vec![
                InstanceSnapshot {
                    properties: hashmap! {
                        "Self".to_owned() => RbxValue::Ref {
                            value: Some(snapshot_id),
                        },
                    },
                    snapshot_id: None,
                    name: Cow::Borrowed("child"),
                    class_name: Cow::Borrowed("child"),
                    children: Vec::new(),
                }
            ],
            properties: HashMap::new(),
            name: Cow::Borrowed("foo"),
            class_name: Cow::Borrowed("foo"),
        };
        let patch_set = compute_patch_set(&snapshot, &tree, root_id);
        // The child's Ref should be rewritten to the paired root instance.
        let expected_patch_set = PatchSet {
            added_instances: vec![
                PatchAddInstance {
                    parent_id: root_id,
                    instance: InstanceSnapshot {
                        snapshot_id: None,
                        properties: hashmap! {
                            "Self".to_owned() => RbxValue::Ref {
                                value: Some(root_id),
                            },
                        },
                        name: Cow::Borrowed("child"),
                        class_name: Cow::Borrowed("child"),
                        children: Vec::new(),
                    },
                },
            ],
            updated_instances: Vec::new(),
            removed_instances: Vec::new(),
        };
        assert_eq!(patch_set, expected_patch_set);
    }
}

View File

@@ -0,0 +1,7 @@
/// Context shared with instance snapshot middleware.
pub struct InstanceSnapshotContext {
    /// Empty struct that will be used later to fill out required Lua state for
    /// user plugins.
    pub plugin_context: Option<()>,
}
/// Placeholder context for snapshots taken of the in-memory filesystem.
pub struct ImfsSnapshotContext;

View File

@@ -0,0 +1,174 @@
use std::{
borrow::Cow,
collections::BTreeMap,
};
use maplit::hashmap;
use rbx_dom_weak::{RbxTree, RbxValue, RbxId};
use serde::Serialize;
use crate::{
imfs::new::{Imfs, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that turns `.csv` files into `LocalizationTable` instances.
pub struct SnapshotCsv;

impl SnapshotMiddleware for SnapshotCsv {
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        // Only plain files whose names end in `.csv` apply.
        if entry.is_directory() {
            return Ok(None);
        }

        let path = entry.path();

        let is_csv = path
            .file_name().unwrap().to_string_lossy()
            .ends_with(".csv");

        if !is_csv {
            return Ok(None);
        }

        let instance_name = path
            .file_stem().expect("Could not extract file stem")
            .to_string_lossy().to_string();

        // The table body is stored as a JSON blob in the Contents property.
        let table_contents = convert_localization_csv(entry.contents(imfs)?);

        Ok(Some(InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Owned(instance_name),
            class_name: Cow::Borrowed("LocalizationTable"),
            properties: hashmap! {
                "Contents".to_owned() => RbxValue::String {
                    value: table_contents,
                },
            },
            children: Vec::new(),
        }))
    }

    fn from_instance(
        _tree: &RbxTree,
        _id: RbxId,
    ) -> SnapshotFileResult {
        unimplemented!("Snapshotting CSV localization tables");
    }
}
/// Struct that holds any valid row from a Roblox CSV translation table.
///
/// We manually deserialize into this table from CSV, but let serde_json handle
/// serialization.
#[derive(Debug, Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct LocalizationEntry<'a> {
    /// Lookup key for the entry, from the "Key" column.
    #[serde(skip_serializing_if = "Option::is_none")]
    key: Option<&'a str>,
    /// Disambiguating context, from the "Context" column.
    #[serde(skip_serializing_if = "Option::is_none")]
    context: Option<&'a str>,
    /// Example usage, from the "Example" column.
    #[serde(skip_serializing_if = "Option::is_none")]
    example: Option<&'a str>,
    /// Source-language text, from the "Source" column.
    #[serde(skip_serializing_if = "Option::is_none")]
    source: Option<&'a str>,
    // We use a BTreeMap here to get deterministic output order.
    // Keys are the remaining (per-locale) column headers.
    values: BTreeMap<&'a str, &'a str>,
}
/// Converts a Roblox localization CSV into the JSON string stored in a
/// LocalizationTable's `Contents` property.
///
/// Normally, we'd be able to let the csv crate construct our struct for us,
/// but a limitation with Serde's 'flatten' feature means it's not possible
/// presently to losslessly collect extra string values while using
/// csv+Serde:
///
/// https://github.com/BurntSushi/rust-csv/issues/151
///
/// This function operates in one step in order to minimize data-copying.
fn convert_localization_csv(contents: &[u8]) -> String {
    let mut reader = csv::Reader::from_reader(contents);

    let headers = reader.headers()
        .expect("TODO: Handle csv errors")
        .clone();

    let records: Vec<_> = reader
        .into_records()
        .map(|record| record.expect("TODO: Handle csv errors"))
        .collect();

    let mut entries = Vec::new();

    for record in &records {
        let mut entry = LocalizationEntry::default();

        for (header, value) in headers.iter().zip(record.into_iter()) {
            // Blank headers and blank cells contribute nothing.
            if header.is_empty() || value.is_empty() {
                continue;
            }

            match header {
                "Key" => entry.key = Some(value),
                "Source" => entry.source = Some(value),
                "Context" => entry.context = Some(value),
                "Example" => entry.example = Some(value),
                // Any other column is treated as a locale translation.
                locale => {
                    entry.values.insert(locale, value);
                }
            }
        }

        // Rows with neither a key nor a source can't be looked up; skip them.
        if entry.key.is_none() && entry.source.is_none() {
            continue;
        }

        entries.push(entry);
    }

    serde_json::to_string(&entries)
        .expect("Could not encode JSON for localization table")
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};
    /// A small CSV file should become a LocalizationTable whose Contents
    /// property holds the JSON-encoded entries.
    #[test]
    fn csv_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file(r#"
Key,Source,Context,Example,es
Ack,Ack!,,An exclamation of despair,¡Ay!"#);
        imfs.load_from_snapshot("/foo.csv", file);
        let entry = imfs.get("/foo.csv").unwrap();
        let instance_snapshot = SnapshotCsv::from_imfs(&mut imfs, &entry).unwrap().unwrap();
        // The empty Context cell should be omitted from the JSON.
        let expected_contents =
            r#"[{"key":"Ack","example":"An exclamation of despair","source":"Ack!","values":{"es":"¡Ay!"}}]"#;
        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "LocalizationTable");
        assert_eq!(instance_snapshot.children, Vec::new());
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Contents".to_owned() => RbxValue::String {
                value: expected_contents.to_owned(),
            },
        });
    }
}

View File

@@ -0,0 +1,126 @@
use std::{
borrow::Cow,
collections::HashMap,
};
use rbx_dom_weak::{RbxTree, RbxId};
use crate::{
imfs::new::{Imfs, ImfsSnapshot, DirectorySnapshot, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
snapshot_from_imfs,
snapshot_from_instance,
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that maps directories to Folder instances and back.
pub struct SnapshotDir;

impl SnapshotMiddleware for SnapshotDir {
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        // Only directories become Folders; plain files are not our job.
        if entry.is_file() {
            return Ok(None);
        }

        let mut snapshot_children = Vec::new();
        for child in entry.children(imfs)? {
            // Children that no middleware understands are skipped.
            if let Some(child_snapshot) = snapshot_from_imfs(imfs, &child)? {
                snapshot_children.push(child_snapshot);
            }
        }

        let folder_name = entry.path()
            .file_name().expect("Could not extract file name")
            .to_str().unwrap().to_string();

        Ok(Some(InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Owned(folder_name),
            class_name: Cow::Borrowed("Folder"),
            properties: HashMap::new(),
            children: snapshot_children,
        }))
    }

    fn from_instance(
        tree: &RbxTree,
        id: RbxId,
    ) -> SnapshotFileResult {
        let instance = tree.get_instance(id).unwrap();

        // Only Folder instances map back to plain directories.
        if instance.class_name != "Folder" {
            return None;
        }

        let children: HashMap<_, _> = instance
            .get_children_ids()
            .iter()
            .filter_map(|&child_id| snapshot_from_instance(tree, child_id))
            .collect();

        Some((
            instance.name.clone(),
            ImfsSnapshot::Directory(DirectorySnapshot { children }),
        ))
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use maplit::hashmap;
    use crate::imfs::new::NoopFetcher;
    /// An empty directory should become an empty Folder instance.
    #[test]
    fn empty_folder() {
        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir::<String>(HashMap::new());
        imfs.load_from_snapshot("/foo", dir);
        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotDir::from_imfs(&mut imfs, &entry).unwrap().unwrap();
        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children, Vec::new());
    }
    /// Nested directories should produce nested Folder snapshots.
    #[test]
    fn folder_in_folder() {
        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "Child" => ImfsSnapshot::dir::<String>(HashMap::new()),
        });
        imfs.load_from_snapshot("/foo", dir);
        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotDir::from_imfs(&mut imfs, &entry).unwrap().unwrap();
        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children.len(), 1);
        let child = &instance_snapshot.children[0];
        assert_eq!(child.name, "Child");
        assert_eq!(child.class_name, "Folder");
        assert_eq!(child.properties, HashMap::new());
        assert_eq!(child.children, Vec::new());
    }
}

View File

@@ -0,0 +1,99 @@
use std::{
fmt,
error::Error,
path::PathBuf,
};
use crate::{
snapshot::InstanceSnapshot,
};
pub type SnapshotResult<'a> = Result<Option<InstanceSnapshot<'a>>, SnapshotError>;
/// An error produced while snapshotting, pairing the failure kind with the
/// path it happened at when known.
#[derive(Debug)]
pub struct SnapshotError {
    // What specifically went wrong.
    detail: SnapshotErrorDetail,
    // The file or directory involved, when known.
    path: Option<PathBuf>,
}
impl SnapshotError {
    /// Creates an error from an arbitrary detail and optional path.
    pub fn new(detail: SnapshotErrorDetail, path: Option<impl Into<PathBuf>>) -> Self {
        SnapshotError {
            detail,
            path: path.map(Into::into),
        }
    }
    /// Convenience constructor: a file expected at `path` did not exist.
    pub(crate) fn file_did_not_exist(path: impl Into<PathBuf>) -> SnapshotError {
        SnapshotError {
            detail: SnapshotErrorDetail::FileDidNotExist,
            path: Some(path.into()),
        }
    }
    /// Convenience constructor: the file's name was not valid Unicode.
    pub(crate) fn file_name_bad_unicode(path: impl Into<PathBuf>) -> SnapshotError {
        SnapshotError {
            detail: SnapshotErrorDetail::FileNameBadUnicode,
            path: Some(path.into()),
        }
    }
    /// Convenience constructor: the file's contents were not valid UTF-8,
    /// wrapping the decode error.
    pub(crate) fn file_contents_bad_unicode(
        inner: std::str::Utf8Error,
        path: impl Into<PathBuf>,
    ) -> SnapshotError {
        SnapshotError {
            detail: SnapshotErrorDetail::FileContentsBadUnicode {
                inner,
            },
            path: Some(path.into()),
        }
    }
}
impl Error for SnapshotError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        // Delegate to the detail enum, which knows about any wrapped error.
        self.detail.source()
    }
}
impl fmt::Display for SnapshotError {
    /// Formats the error detail, appending the path when one is attached.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        if let Some(path) = &self.path {
            write!(formatter, "{} in path {}", self.detail, path.display())
        } else {
            write!(formatter, "{}", self.detail)
        }
    }
}
/// The specific failure kinds behind a `SnapshotError`, kept separate from
/// the path so the two can be reported together.
#[derive(Debug)]
pub enum SnapshotErrorDetail {
    /// A file expected to exist did not.
    FileDidNotExist,
    /// A file's name was not valid Unicode.
    FileNameBadUnicode,
    /// A file's contents were not valid UTF-8.
    FileContentsBadUnicode {
        inner: std::str::Utf8Error,
    },
}
impl SnapshotErrorDetail {
fn source(&self) -> Option<&(dyn Error + 'static)> {
use self::SnapshotErrorDetail::*;
match self {
FileContentsBadUnicode { inner } => Some(inner),
_ => None
}
}
}
impl fmt::Display for SnapshotErrorDetail {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
use self::SnapshotErrorDetail::*;
match self {
FileDidNotExist => write!(formatter, "file did not exist"),
FileNameBadUnicode => write!(formatter, "file name had malformed Unicode"),
FileContentsBadUnicode { inner } => write!(formatter, "file had malformed unicode: {}", inner),
}
}
}

View File

@@ -0,0 +1,181 @@
use std::{
borrow::Cow,
collections::HashMap,
};
use rbx_reflection::try_resolve_value;
use rbx_dom_weak::{RbxTree, RbxId, UnresolvedRbxValue};
use serde::{Deserialize};
use crate::{
imfs::new::{Imfs, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that snapshots `*.model.json` files describing instance trees.
pub struct SnapshotJsonModel;

impl SnapshotMiddleware for SnapshotJsonModel {
    /// Parses a `*.model.json` file into an instance snapshot. The instance
    /// is named after the file; a `Name` field inside the JSON is ignored
    /// (with warnings if it disagrees with the file name).
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        if entry.is_directory() {
            return Ok(None);
        }

        let file_name = entry.path()
            .file_name().unwrap().to_string_lossy();

        // Only files ending in `.model.json` are handled; everything else
        // falls through to other middleware.
        let instance_name = match match_trailing(&file_name, ".model.json") {
            Some(name) => name.to_owned(),
            None => return Ok(None),
        };

        let instance: JsonModel = serde_json::from_slice(entry.contents(imfs)?)
            .expect("TODO: Handle serde_json errors");

        // The file name wins over the JSON `Name` field; warn when they
        // disagree since pre-alpha-14 versions behaved the other way around.
        if let Some(json_name) = &instance.name {
            if json_name != &instance_name {
                log::warn!("Name from JSON model did not match its file name: {}", entry.path().display());
                log::warn!("In Rojo < alpha 14, this model is named \"{}\" (from its 'Name' property)", json_name);
                log::warn!("In Rojo >= alpha 14, this model is named \"{}\" (from its file name)", instance_name);
                log::warn!("'Name' for the top-level instance in a JSON model is now optional and will be ignored.");
            }
        }

        let snapshot = instance.core.into_snapshot(instance_name);

        Ok(Some(snapshot))
    }

    fn from_instance(
        _tree: &RbxTree,
        _id: RbxId,
    ) -> SnapshotFileResult {
        unimplemented!("Snapshotting models");
    }
}
/// Strips `trailer` from the end of `input`, returning the remaining prefix,
/// or `None` when `input` does not end with `trailer`.
fn match_trailing<'a>(input: &'a str, trailer: &str) -> Option<&'a str> {
    if !input.ends_with(trailer) {
        return None;
    }

    // ends_with guarantees trailer.len() <= input.len() and that the cut
    // falls on a char boundary.
    Some(&input[..input.len() - trailer.len()])
}
/// Top-level shape of a `*.model.json` file. `Name` is optional and ignored
/// in favor of the file name (see `SnapshotJsonModel::from_imfs`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModel {
    name: Option<String>,

    #[serde(flatten)]
    core: JsonModelCore,
}

/// A nested instance inside a JSON model; unlike the root, its `Name` is
/// required and used directly.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModelInstance {
    name: String,

    #[serde(flatten)]
    core: JsonModelCore,
}

/// The body shared by the root model and nested instances: class name plus
/// optional children and properties.
///
/// These structs only derive Deserialize, so the previous
/// `skip_serializing_if` attributes were dead configuration and have been
/// removed; plain `default` covers missing fields on deserialization.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JsonModelCore {
    class_name: String,

    #[serde(default)]
    children: Vec<JsonModelInstance>,

    #[serde(default)]
    properties: HashMap<String, UnresolvedRbxValue>,
}
impl JsonModelCore {
fn into_snapshot(self, name: String) -> InstanceSnapshot<'static> {
let class_name = self.class_name;
let children = self.children.into_iter()
.map(|child| child.core.into_snapshot(child.name))
.collect();
let properties = self.properties.into_iter()
.map(|(key, value)| {
try_resolve_value(&class_name, &key, &value)
.map(|resolved| (key, resolved))
})
.collect::<Result<HashMap<_, _>, _>>()
.expect("TODO: Handle rbx_reflection errors");
InstanceSnapshot {
snapshot_id: None,
name: Cow::Owned(name),
class_name: Cow::Owned(class_name),
properties,
children,
}
}
}
#[cfg(test)]
mod test {
    use super::*;

    use maplit::hashmap;
    use rbx_dom_weak::RbxValue;

    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};

    // Covers the main behaviors in one pass: the file name ("foo") wins over
    // the JSON "Name" field ("children"), properties resolve to typed values,
    // and children convert recursively.
    #[test]
    fn model_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file(r#"
            {
                "Name": "children",
                "ClassName": "IntValue",
                "Properties": {
                    "Value": 5
                },
                "Children": [
                    {
                        "Name": "The Child",
                        "ClassName": "StringValue"
                    }
                ]
            }
        "#);

        imfs.load_from_snapshot("/foo.model.json", file);

        let entry = imfs.get("/foo.model.json").unwrap();
        let instance_snapshot = SnapshotJsonModel::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot, InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Borrowed("foo"),
            class_name: Cow::Borrowed("IntValue"),
            properties: hashmap! {
                "Value".to_owned() => RbxValue::Int32 {
                    value: 5,
                },
            },
            children: vec![
                InstanceSnapshot {
                    snapshot_id: None,
                    name: Cow::Borrowed("The Child"),
                    class_name: Cow::Borrowed("StringValue"),
                    properties: HashMap::new(),
                    children: Vec::new(),
                },
            ],
        });
    }
}

View File

@@ -0,0 +1,173 @@
use std::{
borrow::Cow,
str,
};
use maplit::hashmap;
use rbx_dom_weak::{RbxTree, RbxValue, RbxId};
use crate::{
imfs::new::{Imfs, ImfsFetcher, ImfsEntry, FsResultExt},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that snapshots Lua source files (and directories containing
/// init scripts) as Script, LocalScript, or ModuleScript instances.
pub struct SnapshotLua;

impl SnapshotMiddleware for SnapshotLua {
    /// Snapshots `*.lua`, `*.server.lua`, and `*.client.lua` files, plus
    /// directories that contain one of the `init.*.lua` variants.
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        let file_name = entry.path()
            .file_name().unwrap().to_string_lossy();

        if entry.is_directory() {
            // A directory becomes a script when it contains an init script.
            // The three variants are checked in the same order as before;
            // the first hit wins and the resulting snapshot is renamed after
            // the directory itself.
            for init_name in &["init.lua", "init.server.lua", "init.client.lua"] {
                let init_path = entry.path().join(init_name);

                if let Some(init_entry) = imfs.get(init_path).with_not_found()? {
                    if let Some(mut snapshot) = SnapshotLua::from_imfs(imfs, &init_entry)? {
                        snapshot.name = Cow::Owned(file_name.into_owned());
                        return Ok(Some(snapshot));
                    }
                }
            }

            // A directory without an init script is not a Lua instance.
            // Previously control fell through to the file handling below,
            // which would have tried to read the *directory's* contents if
            // its name happened to end in a Lua suffix.
            return Ok(None);
        }

        // Suffix determines the script class; non-Lua files fall through to
        // other middleware.
        let (class_name, instance_name) = if let Some(name) = match_trailing(&file_name, ".server.lua") {
            ("Script", name)
        } else if let Some(name) = match_trailing(&file_name, ".client.lua") {
            ("LocalScript", name)
        } else if let Some(name) = match_trailing(&file_name, ".lua") {
            ("ModuleScript", name)
        } else {
            return Ok(None);
        };

        let contents = entry.contents(imfs)?;
        let contents_str = str::from_utf8(contents)
            .expect("File content was not valid UTF-8")
            .to_string();

        // The file body becomes the script's Source property.
        let properties = hashmap! {
            "Source".to_owned() => RbxValue::String {
                value: contents_str,
            },
        };

        Ok(Some(InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Owned(instance_name.to_owned()),
            class_name: Cow::Borrowed(class_name),
            properties,
            children: Vec::new(),
        }))
    }

    fn from_instance(
        tree: &RbxTree,
        id: RbxId,
    ) -> SnapshotFileResult {
        let instance = tree.get_instance(id).unwrap();

        match instance.class_name.as_str() {
            "ModuleScript" | "LocalScript" | "Script" => unimplemented!("Snapshotting Script instances"),
            _ => None,
        }
    }
}
/// Strips `trailer` from the end of `input`, returning the remaining prefix,
/// or `None` when `input` does not end with `trailer`.
///
/// NOTE(review): this helper is duplicated in the JSON model middleware;
/// consider hoisting it into a shared module.
fn match_trailing<'a>(input: &'a str, trailer: &str) -> Option<&'a str> {
    if !input.ends_with(trailer) {
        return None;
    }

    // ends_with guarantees the subtraction cannot underflow and the slice
    // boundary is valid.
    Some(&input[..input.len() - trailer.len()])
}
#[cfg(test)]
mod test {
    use super::*;

    use maplit::hashmap;

    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};

    // `.lua` maps to ModuleScript with the file body as Source.
    #[test]
    fn module_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file("Hello there!");

        imfs.load_from_snapshot("/foo.lua", file);

        let entry = imfs.get("/foo.lua").unwrap();
        let instance_snapshot = SnapshotLua::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "ModuleScript");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Source".to_owned() => RbxValue::String {
                value: "Hello there!".to_owned(),
            },
        });
    }

    // `.server.lua` maps to Script.
    #[test]
    fn server_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file("Hello there!");

        imfs.load_from_snapshot("/foo.server.lua", file);

        let entry = imfs.get("/foo.server.lua").unwrap();
        let instance_snapshot = SnapshotLua::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "Script");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Source".to_owned() => RbxValue::String {
                value: "Hello there!".to_owned(),
            },
        });
    }

    // `.client.lua` maps to LocalScript.
    #[test]
    fn client_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file("Hello there!");

        imfs.load_from_snapshot("/foo.client.lua", file);

        let entry = imfs.get("/foo.client.lua").unwrap();
        let instance_snapshot = SnapshotLua::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "LocalScript");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Source".to_owned() => RbxValue::String {
                value: "Hello there!".to_owned(),
            },
        });
    }
}

View File

@@ -0,0 +1,39 @@
use std::{
path::{PathBuf, Path},
};
use rbx_dom_weak::{RbxTree, RbxId};
use crate::{
imfs::{
FsResult,
new::{
Imfs,
ImfsEntry,
ImfsFetcher,
ImfsSnapshot,
},
},
snapshot::InstanceSnapshot,
};
/// Result type for filesystem-to-instance snapshotting. `Ok(None)` is how a
/// middleware signals "this entry is not for me" so the next one can try.
pub type SnapshotInstanceResult<'a> = FsResult<Option<InstanceSnapshot<'a>>>;

/// Result type for instance-to-filesystem snapshotting: the file name to
/// write plus its contents snapshot, or `None` when unsupported.
pub type SnapshotFileResult = Option<(String, ImfsSnapshot)>;

/// A bidirectional transformation between in-memory filesystem entries and
/// instance snapshots, implemented once per supported file format.
pub trait SnapshotMiddleware {
    /// Attempts to snapshot the given Imfs entry. Returns `Ok(None)` when
    /// this middleware does not handle the entry.
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static>;

    /// Attempts to turn the instance with the given id back into a file
    /// snapshot; `None` when this middleware cannot represent the instance.
    fn from_instance(
        tree: &RbxTree,
        id: RbxId,
    ) -> SnapshotFileResult;

    /// Paths whose snapshots may be affected when `path` changes. The
    /// default is just the changed path itself.
    fn change_affects_paths(
        path: &Path
    ) -> Vec<PathBuf> {
        vec![path.to_path_buf()]
    }
}

View File

@@ -0,0 +1,76 @@
//! Defines the semantics that Rojo uses to turn entries on the filesystem into
//! Roblox instances using the instance snapshot subsystem.
#![allow(dead_code)]
mod context;
mod csv;
mod dir;
mod error;
mod json_model;
mod lua;
mod middleware;
mod project;
mod rbxm;
mod rbxmx;
mod txt;
use rbx_dom_weak::{RbxTree, RbxId};
use crate::imfs::new::{Imfs, ImfsEntry, ImfsFetcher};
use self::{
middleware::{SnapshotInstanceResult, SnapshotFileResult, SnapshotMiddleware},
csv::SnapshotCsv,
dir::SnapshotDir,
json_model::SnapshotJsonModel,
lua::SnapshotLua,
project::SnapshotProject,
rbxm::SnapshotRbxm,
rbxmx::SnapshotRbxmx,
txt::SnapshotTxt,
};
// Generates the two public snapshot entry points from an ordered list of
// middleware types. Order matters: each generated function tries the
// middleware in the order listed and returns the first successful result.
macro_rules! middlewares {
    ( $($middleware: ident,)* ) => {
        /// Generates a snapshot of instances from the given ImfsEntry.
        pub fn snapshot_from_imfs<F: ImfsFetcher>(
            imfs: &mut Imfs<F>,
            entry: &ImfsEntry,
        ) -> SnapshotInstanceResult<'static> {
            $(
                log::trace!("trying middleware {} on {}", stringify!($middleware), entry.path().display());
                // First middleware to return Ok(Some(...)) wins; errors
                // propagate immediately via `?`.
                if let Some(snapshot) = $middleware::from_imfs(imfs, entry)? {
                    log::trace!("middleware {} success on {}", stringify!($middleware), entry.path().display());
                    return Ok(Some(snapshot));
                }
            )*
            log::trace!("no middleware returned Ok(Some)");
            Ok(None)
        }

        /// Generates an in-memory filesystem snapshot of the given Roblox
        /// instance.
        pub fn snapshot_from_instance(tree: &RbxTree, id: RbxId) -> SnapshotFileResult {
            $(
                // Same first-match-wins scheme, in the same order.
                if let Some(result) = $middleware::from_instance(tree, id) {
                    return Some(result);
                }
            )*
            None
        }
    };
}
// Ordered list of middleware: earlier entries take priority since the
// generated functions return the first `Some` result. SnapshotProject runs
// first so project files win over other interpretations; SnapshotDir is
// tried last.
middlewares! {
    SnapshotProject,
    SnapshotJsonModel,
    SnapshotRbxmx,
    SnapshotRbxm,
    SnapshotLua,
    SnapshotCsv,
    SnapshotTxt,
    SnapshotDir,
}

View File

@@ -0,0 +1,489 @@
use std::{
borrow::Cow,
collections::HashMap,
};
use rbx_dom_weak::{RbxTree, RbxId};
use rbx_reflection::try_resolve_value;
use crate::{
project::{Project, ProjectNode},
imfs::{
FsErrorKind,
new::{Imfs, ImfsFetcher, ImfsEntry},
},
snapshot::InstanceSnapshot,
};
use super::{
snapshot_from_imfs,
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that snapshots `*.project.json` files, and directories that
/// contain a `default.project.json`.
pub struct SnapshotProject;

impl SnapshotMiddleware for SnapshotProject {
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        // A directory is treated as a project if it contains
        // default.project.json. NotFound is the only tolerated error; any
        // other failure propagates, and a hit recurses into the file branch.
        if entry.is_directory() {
            let project_path = entry.path().join("default.project.json");

            match imfs.get(project_path) {
                Err(ref err) if err.kind() == FsErrorKind::NotFound => {}
                Err(err) => return Err(err),
                Ok(entry) => return SnapshotProject::from_imfs(imfs, &entry),
            }
        }

        // Suffix check runs on the whole path; only the file-name tail
        // matters in practice.
        if !entry.path().to_string_lossy().ends_with(".project.json") {
            return Ok(None)
        }

        let project = Project::load_from_slice(entry.contents(imfs)?, entry.path())
            .expect("Invalid project file");

        // The project's `name` names the root instance of the tree.
        snapshot_project_node(&project.name, &project.tree, imfs)
    }

    fn from_instance(
        _tree: &RbxTree,
        _id: RbxId,
    ) -> SnapshotFileResult {
        // TODO: Supporting turning instances into projects
        None
    }
}
/// Builds an `InstanceSnapshot` from one node of a project tree, recursing
/// into both the `$path`-referenced entry (if any) and the node's children.
///
/// Properties and children from `$path` come first; properties declared on
/// the node itself are applied afterwards and therefore override them.
fn snapshot_project_node<F: ImfsFetcher>(
    instance_name: &str,
    node: &ProjectNode,
    imfs: &mut Imfs<F>,
) -> SnapshotInstanceResult<'static> {
    assert!(node.ignore_unknown_instances.is_none(), "TODO: Support $ignoreUnknownInstances");

    let name = Cow::Owned(instance_name.to_owned());
    let mut class_name = node.class_name
        .as_ref()
        .map(|name| Cow::Owned(name.clone()));
    let mut properties = HashMap::new();
    let mut children = Vec::new();

    if let Some(path) = &node.path {
        let entry = imfs.get(path)?;

        if let Some(snapshot) = snapshot_from_imfs(imfs, &entry)? {
            // If a class name was already specified, then it'll override the
            // class name of this snapshot ONLY if it's a Folder.
            //
            // This restriction is in place to prevent applying properties to
            // instances that don't make sense. The primary use-case for using
            // $className and $path at the same time is to use a directory as a
            // service in a place file.
            class_name = match class_name {
                Some(class_name) => {
                    if snapshot.class_name == "Folder" {
                        Some(class_name)
                    } else {
                        // TODO: Turn this into an error object.
                        panic!("If $className and $path are specified, $path must yield an instance of class Folder");
                    }
                }
                None => Some(snapshot.class_name)
            };

            // Properties from the snapshot are pulled in unchanged, and
            // overridden by properties set on the project node below.
            // (extend replaces the previous reserve + insert loop.)
            properties.extend(snapshot.properties);

            // The snapshot's children are merged with the children defined
            // in the project node, if there are any.
            children.extend(snapshot.children);
        } else {
            // TODO: Should this issue an error instead?
            log::warn!("$path referred to a path that could not be turned into an instance by Rojo");
        }
    }

    let class_name = class_name
        // TODO: Turn this into an error object.
        .expect("$className or $path must be specified");

    for (child_name, child_project_node) in &node.children {
        if let Some(child) = snapshot_project_node(child_name, child_project_node, imfs)? {
            children.push(child);
        }
    }

    // Node-declared properties go in last so they win over $path properties.
    for (key, value) in &node.properties {
        let resolved_value = try_resolve_value(&class_name, key, value)
            .expect("TODO: Properly handle value resolution errors");

        properties.insert(key.clone(), resolved_value);
    }

    Ok(Some(InstanceSnapshot {
        snapshot_id: None,
        name,
        class_name,
        properties,
        children,
    }))
}
#[cfg(test)]
mod test {
    use super::*;

    use rbx_dom_weak::RbxValue;
    use maplit::hashmap;

    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};

    // Directory containing default.project.json is picked up as a project.
    #[test]
    fn project_from_folder() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "indirect-project",
                    "tree": {
                        "$className": "Folder"
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "indirect-project");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // A *.project.json file addressed directly (not via default.project.json).
    #[test]
    fn project_from_direct_file() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "hello.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "direct-project",
                    "tree": {
                        "$className": "Model"
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo/hello.project.json").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "direct-project");
        assert_eq!(instance_snapshot.class_name, "Model");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // $properties given in fully-qualified { Type, Value } form.
    #[test]
    fn project_with_resolved_properties() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "resolved-properties",
                    "tree": {
                        "$className": "StringValue",
                        "$properties": {
                            "Value": {
                                "Type": "String",
                                "Value": "Hello, world!"
                            }
                        }
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "resolved-properties");
        assert_eq!(instance_snapshot.class_name, "StringValue");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: "Hello, world!".to_owned(),
            },
        });
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // $properties given as bare values, resolved via the class's reflection
    // information into the same typed result.
    #[test]
    fn project_with_unresolved_properties() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "unresolved-properties",
                    "tree": {
                        "$className": "StringValue",
                        "$properties": {
                            "Value": "Hi!"
                        }
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "unresolved-properties");
        assert_eq!(instance_snapshot.class_name, "StringValue");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: "Hi!".to_owned(),
            },
        });
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // Non-$ keys in the tree become child instances.
    #[test]
    fn project_with_children() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "children",
                    "tree": {
                        "$className": "Folder",
                        "Child": {
                            "$className": "Model"
                        }
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "children");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children.len(), 1);

        let child = &instance_snapshot.children[0];
        assert_eq!(child.name, "Child");
        assert_eq!(child.class_name, "Model");
        assert_eq!(child.properties, HashMap::new());
        assert_eq!(child.children, Vec::new());
    }

    // $path pulls in another middleware's snapshot (txt -> StringValue), but
    // the project node's name wins.
    #[test]
    fn project_with_path_to_txt() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "path-project",
                    "tree": {
                        "$path": "other.txt"
                    }
                }
            "#),
            "other.txt" => ImfsSnapshot::file("Hello, world!"),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "path-project");
        assert_eq!(instance_snapshot.class_name, "StringValue");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: "Hello, world!".to_owned(),
            },
        });
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // $path may point at another project file; its tree is inlined while the
    // outer project's name is kept.
    #[test]
    fn project_with_path_to_project() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "path-project",
                    "tree": {
                        "$path": "other.project.json"
                    }
                }
            "#),
            "other.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "other-project",
                    "tree": {
                        "$className": "Model"
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "path-project");
        assert_eq!(instance_snapshot.class_name, "Model");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children, Vec::new());
    }

    // Children of a $path-referenced project are carried over as well.
    #[test]
    fn project_with_path_to_project_with_children() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "path-child-project",
                    "tree": {
                        "$path": "other.project.json"
                    }
                }
            "#),
            "other.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "other-project",
                    "tree": {
                        "$className": "Folder",
                        "SomeChild": {
                            "$className": "Model"
                        }
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "path-child-project");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children.len(), 1);

        let child = &instance_snapshot.children[0];
        assert_eq!(child.name, "SomeChild");
        assert_eq!(child.class_name, "Model");
        assert_eq!(child.properties, HashMap::new());
        assert_eq!(child.children, Vec::new());
    }

    /// Ensures that if a property is defined both in the resulting instance
    /// from $path and also in $properties, that the $properties value takes
    /// precedence.
    #[test]
    fn project_path_property_overrides() {
        let _ = env_logger::try_init();

        let mut imfs = Imfs::new(NoopFetcher);
        let dir = ImfsSnapshot::dir(hashmap! {
            "default.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "path-property-override",
                    "tree": {
                        "$path": "other.project.json",
                        "$properties": {
                            "Value": "Changed"
                        }
                    }
                }
            "#),
            "other.project.json" => ImfsSnapshot::file(r#"
                {
                    "name": "other-project",
                    "tree": {
                        "$className": "StringValue",
                        "$properties": {
                            "Value": "Original"
                        }
                    }
                }
            "#),
        });

        imfs.load_from_snapshot("/foo", dir);

        let entry = imfs.get("/foo").unwrap();
        let instance_snapshot = SnapshotProject::from_imfs(&mut imfs, &entry)
            .expect("snapshot error")
            .expect("snapshot returned no instances");

        assert_eq!(instance_snapshot.name, "path-property-override");
        assert_eq!(instance_snapshot.class_name, "StringValue");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: "Changed".to_owned(),
            },
        });
        assert_eq!(instance_snapshot.children, Vec::new());
    }
}

View File

@@ -0,0 +1,96 @@
use std::{
borrow::Cow,
collections::HashMap,
};
use rbx_dom_weak::{RbxTree, RbxInstanceProperties, RbxId};
use crate::{
imfs::new::{Imfs, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that snapshots `*.rbxm` (binary Roblox model) files.
pub struct SnapshotRbxm;

impl SnapshotMiddleware for SnapshotRbxm {
    // Decodes a *.rbxm file and snapshots its single top-level instance,
    // renamed after the file.
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        if entry.is_directory() {
            return Ok(None);
        }

        let file_name = entry.path()
            .file_name().unwrap().to_string_lossy();

        if !file_name.ends_with(".rbxm") {
            return Ok(None);
        }

        // The instance takes its name from the file stem, not from the model.
        let instance_name = entry.path()
            .file_stem().expect("Could not extract file stem")
            .to_string_lossy().to_string();

        // rbx_binary decodes into a throwaway DataModel-rooted tree; the
        // decoded instances become children of this root.
        let mut temp_tree = RbxTree::new(RbxInstanceProperties {
            name: "DataModel".to_owned(),
            class_name: "DataModel".to_owned(),
            properties: HashMap::new(),
        });

        let root_id = temp_tree.get_root_id();
        rbx_binary::decode(&mut temp_tree, root_id, entry.contents(imfs)?)
            .expect("TODO: Handle rbx_binary errors");

        let root_instance = temp_tree.get_instance(root_id).unwrap();
        let children = root_instance.get_children_ids();

        if children.len() == 1 {
            let mut snapshot = InstanceSnapshot::from_tree(&temp_tree, children[0]);
            snapshot.name = Cow::Owned(instance_name);

            Ok(Some(snapshot))
        } else {
            // Only single-root model files are supported for now.
            panic!("Rojo doesn't have support for model files with zero or more than one top-level instances yet.");
        }
    }

    fn from_instance(
        _tree: &RbxTree,
        _id: RbxId,
    ) -> SnapshotFileResult {
        unimplemented!("Snapshotting models");
    }
}
#[cfg(test)]
mod test {
    use super::*;

    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};

    // Decodes a bundled binary fixture and checks the name comes from the
    // file stem rather than the model contents.
    #[test]
    fn model_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file(include_bytes!("../../assets/test-folder.rbxm").to_vec());

        imfs.load_from_snapshot("/foo.rbxm", file);

        let entry = imfs.get("/foo.rbxm").unwrap();
        let instance_snapshot = SnapshotRbxm::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.children, Vec::new());

        // We intentionally don't assert on properties. rbx_binary does not
        // distinguish between String and BinaryString. The sample model was
        // created by Roblox Studio and has an empty BinaryString "Tags"
        // property that currently deserializes incorrectly.
        // See: https://github.com/rojo-rbx/rbx-dom/issues/49
    }
}

View File

@@ -0,0 +1,94 @@
use std::borrow::Cow;
use rbx_dom_weak::{RbxTree, RbxId};
use crate::{
imfs::new::{Imfs, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that snapshots `*.rbxmx` (XML Roblox model) files.
pub struct SnapshotRbxmx;

impl SnapshotMiddleware for SnapshotRbxmx {
    /// Decodes a *.rbxmx file and snapshots its single top-level instance,
    /// renamed after the file stem.
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        if entry.is_directory() {
            return Ok(None);
        }

        let is_rbxmx = entry.path()
            .file_name().unwrap().to_string_lossy()
            .ends_with(".rbxmx");

        if !is_rbxmx {
            return Ok(None);
        }

        // The snapshot is named after the file, not the model contents.
        let instance_name = entry.path()
            .file_stem().expect("Could not extract file stem")
            .to_string_lossy().to_string();

        // ReadUnknown presumably keeps properties the decoder doesn't
        // recognize instead of failing — confirm against rbx_xml docs.
        let options = rbx_xml::DecodeOptions::new()
            .property_behavior(rbx_xml::DecodePropertyBehavior::ReadUnknown);

        let temp_tree = rbx_xml::from_reader(entry.contents(imfs)?, options)
            .expect("TODO: Handle rbx_xml errors");

        let root_instance = temp_tree.get_instance(temp_tree.get_root_id()).unwrap();
        let children = root_instance.get_children_ids();

        if children.len() != 1 {
            panic!("Rojo doesn't have support for model files with zero or more than one top-level instances yet.");
        }

        let mut snapshot = InstanceSnapshot::from_tree(&temp_tree, children[0]);
        snapshot.name = Cow::Owned(instance_name);

        Ok(Some(snapshot))
    }

    fn from_instance(
        _tree: &RbxTree,
        _id: RbxId,
    ) -> SnapshotFileResult {
        unimplemented!("Snapshotting models");
    }
}
#[cfg(test)]
mod test {
    use super::*;

    use std::collections::HashMap;

    use crate::imfs::new::{ImfsSnapshot, NoopFetcher};

    // The Name property inside the model is ignored in favor of the file
    // stem ("foo").
    #[test]
    fn model_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file(r#"
            <roblox version="4">
                <Item class="Folder" referent="0">
                    <Properties>
                        <string name="Name">THIS NAME IS IGNORED</string>
                    </Properties>
                </Item>
            </roblox>
        "#);

        imfs.load_from_snapshot("/foo.rbxmx", file);

        let entry = imfs.get("/foo.rbxmx").unwrap();
        let instance_snapshot = SnapshotRbxmx::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "Folder");
        assert_eq!(instance_snapshot.properties, HashMap::new());
        assert_eq!(instance_snapshot.children, Vec::new());
    }
}

View File

@@ -0,0 +1,147 @@
use std::{
borrow::Cow,
str,
};
use maplit::hashmap;
use rbx_dom_weak::{RbxTree, RbxValue, RbxId};
use crate::{
imfs::new::{Imfs, ImfsSnapshot, FileSnapshot, ImfsFetcher, ImfsEntry},
snapshot::InstanceSnapshot,
};
use super::{
middleware::{SnapshotMiddleware, SnapshotInstanceResult, SnapshotFileResult},
};
/// Middleware that maps `.txt` files to and from StringValue instances.
pub struct SnapshotTxt;

impl SnapshotMiddleware for SnapshotTxt {
    /// Turns a `.txt` file into a StringValue whose `Value` property holds
    /// the file's contents.
    fn from_imfs<F: ImfsFetcher>(
        imfs: &mut Imfs<F>,
        entry: &ImfsEntry,
    ) -> SnapshotInstanceResult<'static> {
        if entry.is_directory() {
            return Ok(None);
        }

        let extension = match entry.path().extension() {
            Some(x) => x.to_str().unwrap(),
            None => return Ok(None),
        };

        if extension != "txt" {
            return Ok(None);
        }

        // NOTE(review): the unwrap/expect calls below panic on non-Unicode
        // file names and non-UTF-8 contents; other middleware uses
        // to_string_lossy here instead — confirm intended behavior.
        let instance_name = entry.path()
            .file_stem().expect("Could not extract file stem")
            .to_str().unwrap().to_string();

        let contents = entry.contents(imfs)?;
        let contents_str = str::from_utf8(contents)
            .expect("File content was not valid UTF-8").to_string();

        let properties = hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: contents_str,
            },
        };

        Ok(Some(InstanceSnapshot {
            snapshot_id: None,
            name: Cow::Owned(instance_name),
            class_name: Cow::Borrowed("StringValue"),
            properties,
            children: Vec::new(),
        }))
    }

    /// Writes a childless StringValue back out as a `<name>.txt` file
    /// snapshot; returns None for any other class or when the instance has
    /// children. A missing `Value` property is treated as an empty string.
    fn from_instance(
        tree: &RbxTree,
        id: RbxId,
    ) -> SnapshotFileResult {
        let instance = tree.get_instance(id).unwrap();

        if instance.class_name != "StringValue" {
            return None;
        }

        if !instance.get_children_ids().is_empty() {
            return None;
        }

        let value = match instance.properties.get("Value") {
            Some(RbxValue::String { value }) => value.clone(),
            // Replaced the placeholder "wrong type ahh" panic with a message
            // that describes the actual invariant violation.
            Some(_) => panic!("StringValue instances must have a String property named Value"),
            None => String::new(),
        };

        let snapshot = ImfsSnapshot::File(FileSnapshot {
            contents: value.into_bytes(),
        });

        let mut file_name = instance.name.clone();
        file_name.push_str(".txt");

        Some((file_name, snapshot))
    }
}
#[cfg(test)]
mod test {
    use super::*;

    use maplit::hashmap;
    use rbx_dom_weak::{RbxInstanceProperties};

    use crate::imfs::new::NoopFetcher;

    // A .txt file becomes a StringValue with the file body as Value.
    #[test]
    fn instance_from_imfs() {
        let mut imfs = Imfs::new(NoopFetcher);
        let file = ImfsSnapshot::file("Hello there!");

        imfs.load_from_snapshot("/foo.txt", file);

        let entry = imfs.get("/foo.txt").unwrap();
        let instance_snapshot = SnapshotTxt::from_imfs(&mut imfs, &entry).unwrap().unwrap();

        assert_eq!(instance_snapshot.name, "foo");
        assert_eq!(instance_snapshot.class_name, "StringValue");
        assert_eq!(instance_snapshot.properties, hashmap! {
            "Value".to_owned() => RbxValue::String {
                value: "Hello there!".to_owned(),
            },
        });
    }

    // Round-trip direction: only checks that from_instance returns Some for
    // a childless StringValue; the produced snapshot is not inspected.
    #[test]
    fn imfs_from_instance() {
        let tree = RbxTree::new(string_value("Root", "Hello, world!"));
        let root_id = tree.get_root_id();

        let (_file_name, _file) = SnapshotTxt::from_instance(&tree, root_id).unwrap();
    }

    // NOTE(review): this helper is unused by the tests in this module.
    fn folder(name: impl Into<String>) -> RbxInstanceProperties {
        RbxInstanceProperties {
            name: name.into(),
            class_name: "Folder".to_owned(),
            properties: Default::default(),
        }
    }

    // Builds properties for a StringValue instance with the given Value.
    fn string_value(name: impl Into<String>, value: impl Into<String>) -> RbxInstanceProperties {
        RbxInstanceProperties {
            name: name.into(),
            class_name: "StringValue".to_owned(),
            properties: hashmap! {
                "Value".to_owned() => RbxValue::String {
                    value: value.into(),
                },
            },
        }
    }
}

View File

@@ -1,379 +0,0 @@
//! Defines the snapshot subsystem of Rojo, which defines a lightweight instance
//! representation (`RbxSnapshotInstance`) and a system to incrementally update
//! an `RbxTree` based on snapshots.
use std::{
borrow::Cow,
cmp::Ordering,
collections::{HashMap, HashSet},
fmt,
str,
};
use rbx_dom_weak::{RbxTree, RbxId, RbxInstanceProperties, RbxValue};
use serde::{Serialize, Deserialize};
use crate::{
path_map::PathMap,
rbx_session::MetadataPerInstance,
};
/// Contains all of the IDs that were modified when the snapshot reconciler
/// applied an update.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct InstanceChanges {
    /// IDs of instances created by the update.
    pub added: HashSet<RbxId>,
    /// IDs of instances deleted by the update.
    pub removed: HashSet<RbxId>,
    /// IDs of instances whose contents changed in place.
    pub updated: HashSet<RbxId>,
}
impl fmt::Display for InstanceChanges {
    /// Renders a block listing each non-empty section (Added/Removed/
    /// Updated) with its IDs, one per indented line.
    fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
        writeln!(output, "InstanceChanges {{")?;

        let sections = [
            ("Added", &self.added),
            ("Removed", &self.removed),
            ("Updated", &self.updated),
        ];

        for (label, ids) in &sections {
            if ids.is_empty() {
                continue;
            }

            writeln!(output, "    {}:", label)?;
            for id in ids.iter() {
                writeln!(output, "        {}", id)?;
            }
        }

        writeln!(output, "}}")
    }
}
impl InstanceChanges {
    /// Whether this change set records no added, removed, or updated IDs.
    pub fn is_empty(&self) -> bool {
        self.added.len() + self.removed.len() + self.updated.len() == 0
    }
}
/// A lightweight, hierarchical representation of an instance that can be
/// applied to the tree.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
pub struct RbxSnapshotInstance<'a> {
    // Borrowed-or-owned strings (Cow) keep snapshots cheap to build from
    // borrowed sources while still allowing fully-owned 'static snapshots.
    pub name: Cow<'a, str>,
    pub class_name: Cow<'a, str>,
    pub properties: HashMap<String, RbxValue>,
    pub children: Vec<RbxSnapshotInstance<'a>>,
    // Rojo-specific bookkeeping (source path etc.) attached to this node.
    pub metadata: MetadataPerInstance,
}
impl<'a> RbxSnapshotInstance<'a> {
    /// Deep-copies this snapshot into one whose strings own their data,
    /// detaching the result from the lifetime of the borrowed source.
    pub fn get_owned(&'a self) -> RbxSnapshotInstance<'static> {
        RbxSnapshotInstance {
            name: Cow::Owned(self.name.clone().into_owned()),
            class_name: Cow::Owned(self.class_name.clone().into_owned()),
            properties: self.properties.clone(),
            children: self.children.iter().map(RbxSnapshotInstance::get_owned).collect(),
            metadata: self.metadata.clone(),
        }
    }
}
impl<'a> PartialOrd for RbxSnapshotInstance<'a> {
    // Orders snapshots by name, then by class name. NOTE(review): this
    // ignores properties/children, so the ordering is not consistent with the
    // derived PartialEq — presumably intentional for sorting by identity
    // during reconciliation; confirm before deriving Ord.
    fn partial_cmp(&self, other: &RbxSnapshotInstance) -> Option<Ordering> {
        Some(self.name.cmp(&other.name)
            .then(self.class_name.cmp(&other.class_name)))
    }
}
/// Generates an `RbxSnapshotInstance` from an existing `RbxTree` and an ID to
/// use as the root of the snapshot.
///
/// This is used to transform instances created by rbx_xml and rbx_binary into
/// snapshots that can be applied to the tree to reduce instance churn.
pub fn snapshot_from_tree(tree: &RbxTree, id: RbxId) -> Option<RbxSnapshotInstance<'static>> {
    let instance = tree.get_instance(id)?;

    // Snapshot every child recursively; `collect` over Options yields None if
    // any child is missing from the tree, matching the original early return.
    let children = instance
        .get_children_ids()
        .iter()
        .map(|&child_id| snapshot_from_tree(tree, child_id))
        .collect::<Option<Vec<_>>>()?;

    Some(RbxSnapshotInstance {
        name: Cow::Owned(instance.name.to_owned()),
        class_name: Cow::Owned(instance.class_name.to_owned()),
        properties: instance.properties.clone(),
        children,
        // Instances coming straight from a tree carry no file-based metadata.
        metadata: MetadataPerInstance {
            source_path: None,
            ignore_unknown_instances: false,
            project_definition: None,
        },
    })
}
/// Constructs a new `RbxTree` out of a snapshot and places to attach metadata.
pub fn reify_root(
    snapshot: &RbxSnapshotInstance,
    instance_per_path: &mut PathMap<HashSet<RbxId>>,
    metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
    changes: &mut InstanceChanges,
) -> RbxTree {
    // Turn the snapshot's own data into the root instance of a fresh tree.
    let instance = reify_core(snapshot);
    let mut tree = RbxTree::new(instance);
    let id = tree.get_root_id();

    reify_metadata(snapshot, id, instance_per_path, metadata_per_instance);

    // The new root is reported as an addition like any other instance.
    changes.added.insert(id);

    // Children are attached recursively via reify_subtree.
    for child in &snapshot.children {
        reify_subtree(child, &mut tree, id, instance_per_path, metadata_per_instance, changes);
    }

    tree
}

/// Adds instances to a portion of the given `RbxTree`, used for when new
/// instances are created.
pub fn reify_subtree(
    snapshot: &RbxSnapshotInstance,
    tree: &mut RbxTree,
    parent_id: RbxId,
    instance_per_path: &mut PathMap<HashSet<RbxId>>,
    metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
    changes: &mut InstanceChanges,
) -> RbxId {
    // Insert this snapshot node under the given parent.
    let instance = reify_core(snapshot);
    let id = tree.insert_instance(instance, parent_id);

    reify_metadata(snapshot, id, instance_per_path, metadata_per_instance);

    changes.added.insert(id);

    // Recurse into children, parenting them to the instance created above.
    for child in &snapshot.children {
        reify_subtree(child, tree, id, instance_per_path, metadata_per_instance, changes);
    }

    // ID assigned to the root of the newly reified subtree.
    id
}
// Records which instance IDs came from which source path, plus the
// per-instance metadata, so file changes can be traced back to instances.
fn reify_metadata(
    snapshot: &RbxSnapshotInstance,
    instance_id: RbxId,
    instance_per_path: &mut PathMap<HashSet<RbxId>>,
    metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
) {
    if let Some(source_path) = &snapshot.metadata.source_path {
        // Get-or-insert the ID set for this path. NOTE(review): PathMap does
        // not appear to expose an entry-style API, hence the insert followed
        // by a second lookup — confirm against path_map.rs.
        let path_metadata = match instance_per_path.get_mut(&source_path) {
            Some(v) => v,
            None => {
                instance_per_path.insert(source_path.clone(), Default::default());
                instance_per_path.get_mut(&source_path).unwrap()
            },
        };

        path_metadata.insert(instance_id);
    }

    metadata_per_instance.insert(instance_id, snapshot.metadata.clone());
}

/// Updates existing instances in an existing `RbxTree`, potentially adding,
/// updating, or removing children and properties.
pub fn reconcile_subtree(
    tree: &mut RbxTree,
    id: RbxId,
    snapshot: &RbxSnapshotInstance,
    instance_per_path: &mut PathMap<HashSet<RbxId>>,
    metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
    changes: &mut InstanceChanges,
) {
    // Metadata is refreshed unconditionally, even when nothing else changed.
    reify_metadata(snapshot, id, instance_per_path, metadata_per_instance);

    // Only record an update when a name/class/property actually differed.
    if reconcile_instance_properties(tree.get_instance_mut(id).unwrap(), snapshot) {
        changes.updated.insert(id);
    }

    reconcile_instance_children(tree, id, snapshot, instance_per_path, metadata_per_instance, changes);
}
/// Builds the `RbxInstanceProperties` for a single snapshot node, ignoring
/// its children and metadata (those are handled by the reify_* callers).
fn reify_core(snapshot: &RbxSnapshotInstance) -> RbxInstanceProperties {
    RbxInstanceProperties {
        name: snapshot.name.to_string(),
        class_name: snapshot.class_name.to_string(),
        // Cloning the map directly replaces the previous element-by-element
        // rebuild, which allocated the identical result with more code.
        properties: snapshot.properties.clone(),
    }
}
/// Updates the given instance to match the properties defined on the snapshot.
///
/// Returns whether any changes were applied.
fn reconcile_instance_properties(instance: &mut RbxInstanceProperties, snapshot: &RbxSnapshotInstance) -> bool {
    let mut has_diffs = false;

    // Name and class are reconciled directly on the instance.
    if instance.name != snapshot.name {
        instance.name = snapshot.name.to_string();
        has_diffs = true;
    }

    if instance.class_name != snapshot.class_name {
        instance.class_name = snapshot.class_name.to_string();
        has_diffs = true;
    }

    // Pending property changes: Some(value) = set/overwrite, None = remove.
    // Buffered first because we can't mutate the map while iterating it.
    let mut property_updates = HashMap::new();

    // Pass 1: walk the instance's current properties, finding ones the
    // snapshot changed or dropped.
    for (key, instance_value) in &instance.properties {
        match snapshot.properties.get(key) {
            Some(snapshot_value) => {
                if snapshot_value != instance_value {
                    property_updates.insert(key.clone(), Some(snapshot_value.clone()));
                }
            },
            None => {
                property_updates.insert(key.clone(), None);
            },
        }
    }

    // Pass 2: walk the snapshot's properties, picking up brand-new keys.
    // (Keys present in both maps with equal values are compared a second
    // time here; redundant but harmless.)
    for (key, snapshot_value) in &snapshot.properties {
        if property_updates.contains_key(key) {
            continue;
        }

        match instance.properties.get(key) {
            Some(instance_value) => {
                if snapshot_value != instance_value {
                    property_updates.insert(key.clone(), Some(snapshot_value.clone()));
                }
            },
            None => {
                property_updates.insert(key.clone(), Some(snapshot_value.clone()));
            },
        }
    }

    has_diffs = has_diffs || !property_updates.is_empty();

    // Apply the buffered changes.
    for (key, change) in property_updates.drain() {
        match change {
            Some(value) => instance.properties.insert(key, value),
            None => instance.properties.remove(&key),
        };
    }

    has_diffs
}
/// Updates the children of the instance in the `RbxTree` to match the children
/// of the `RbxSnapshotInstance`. Order will be updated to match.
fn reconcile_instance_children(
    tree: &mut RbxTree,
    id: RbxId,
    snapshot: &RbxSnapshotInstance,
    instance_per_path: &mut PathMap<HashSet<RbxId>>,
    metadata_per_instance: &mut HashMap<RbxId, MetadataPerInstance>,
    changes: &mut InstanceChanges,
) {
    // These lists are kept so that we can apply all the changes we figure out
    // after the immutable borrow of the tree's children ends.
    let mut children_to_maybe_update: Vec<(RbxId, &RbxSnapshotInstance)> = Vec::new();
    let mut children_to_add: Vec<(usize, &RbxSnapshotInstance)> = Vec::new();
    let mut children_to_remove: Vec<RbxId> = Vec::new();

    // This map is used once we're done mutating children to sort them according
    // to the order specified in the snapshot. Without it, a snapshot with a new
    // child prepended will cause the RbxTree instance to have out-of-order
    // children and would make Rojo non-deterministic.
    let mut ids_to_snapshot_indices = HashMap::new();

    // Since we have to enumerate the children of both the RbxTree instance and
    // our snapshot, we keep a set of the snapshot children we've seen.
    let mut visited_snapshot_indices = vec![false; snapshot.children.len()];

    let children_ids = tree.get_instance(id).unwrap().get_children_ids();

    // Find all instances that were removed or updated, which we derive by
    // trying to pair up existing instances to snapshots.
    for &child_id in children_ids {
        let child_instance = tree.get_instance(child_id).unwrap();

        // Locate a matching snapshot for this instance
        let mut matching_snapshot = None;
        for (snapshot_index, child_snapshot) in snapshot.children.iter().enumerate() {
            if visited_snapshot_indices[snapshot_index] {
                continue;
            }

            // We assume that instances with the same name are probably pretty
            // similar. This heuristic is similar to React's reconciliation
            // strategy.
            if child_snapshot.name == child_instance.name {
                ids_to_snapshot_indices.insert(child_id, snapshot_index);
                visited_snapshot_indices[snapshot_index] = true;
                matching_snapshot = Some(child_snapshot);
                break;
            }
        }

        match matching_snapshot {
            Some(child_snapshot) => {
                children_to_maybe_update.push((child_instance.get_id(), child_snapshot));
            }
            None => {
                children_to_remove.push(child_instance.get_id());
            }
        }
    }

    // Find all instances that were added, which is just the snapshots we didn't
    // match up to existing instances above.
    for (snapshot_index, child_snapshot) in snapshot.children.iter().enumerate() {
        if !visited_snapshot_indices[snapshot_index] {
            children_to_add.push((snapshot_index, child_snapshot));
        }
    }

    // Apply all of our removals we gathered from our diff; every descendant of
    // a removed subtree is reported and has its metadata dropped.
    for child_id in &children_to_remove {
        if let Some(subtree) = tree.remove_instance(*child_id) {
            for id in subtree.iter_all_ids() {
                metadata_per_instance.remove(&id);
                changes.removed.insert(id);
            }
        }
    }

    // Apply all of our children additions
    for (snapshot_index, child_snapshot) in &children_to_add {
        let id = reify_subtree(child_snapshot, tree, id, instance_per_path, metadata_per_instance, changes);
        ids_to_snapshot_indices.insert(id, *snapshot_index);
    }

    // Apply any updates that might have updates
    for (child_id, child_snapshot) in &children_to_maybe_update {
        reconcile_subtree(tree, *child_id, child_snapshot, instance_per_path, metadata_per_instance, changes);
    }

    // Apply the sort mapping defined by ids_to_snapshot_indices above
    let instance = tree.get_instance_mut(id).unwrap();
    instance.sort_children_unstable_by_key(|id| ids_to_snapshot_indices.get(&id).unwrap());
}

View File

@@ -1,174 +0,0 @@
use std::{
collections::HashMap,
fmt,
io::Write,
path::Path,
process::{Command, Stdio},
};
use log::warn;
use rbx_dom_weak::{RbxTree, RbxId};
use crate::{
imfs::{Imfs, ImfsItem},
rbx_session::RbxSession,
web::api::PublicInstanceMetadata,
rbx_session::MetadataPerInstance,
};
// Shared DOT prelude for every generated graph: left-to-right layout with
// record-shaped nodes. Each Display impl below appends nodes/edges and a
// closing brace.
static GRAPHVIZ_HEADER: &str = r#"
digraph RojoTree {
    rankdir = "LR";
    graph [
        ranksep = "0.7",
        nodesep = "0.5",
    ];
    node [
        fontname = "Hack",
        shape = "record",
    ];
"#;
/// Compiles DOT source to SVG by invoking dot on the command line.
/// Compiles DOT source to SVG by invoking dot on the command line.
/// Returns None (after logging a warning) when the `dot` binary can't be
/// spawned; panics on I/O errors once the process is running.
pub fn graphviz_to_svg(source: &str) -> Option<String> {
    let spawn_result = Command::new("dot")
        .arg("-Tsvg")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn();

    let mut child = if let Ok(child) = spawn_result {
        child
    } else {
        warn!("Failed to spawn GraphViz process to visualize current state.");
        warn!("If you want pretty graphs, install GraphViz and make sure 'dot' is on your PATH!");
        return None;
    };

    // Feed the DOT source to the child; the scope ends the mutable borrow of
    // the stdin handle before we wait on the process.
    {
        let stdin = child.stdin.as_mut().expect("Failed to open stdin");
        stdin.write_all(source.as_bytes()).expect("Failed to write to stdin");
    }

    let output = child.wait_with_output().expect("Failed to read stdout");
    let svg = String::from_utf8(output.stdout).expect("Failed to parse stdout as UTF-8");

    Some(svg)
}
// Display wrapper that renders an RbxTree (plus per-instance metadata) as
// GraphViz DOT source.
pub struct VisualizeRbxTree<'a, 'b> {
    pub tree: &'a RbxTree,
    // Metadata is appended to each node's label when present.
    pub metadata: &'b HashMap<RbxId, MetadataPerInstance>,
}

impl<'a, 'b> fmt::Display for VisualizeRbxTree<'a, 'b> {
    /// Emits the shared DOT header, every node/edge from the root down, and
    /// the closing brace.
    fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
        writeln!(output, "{}", GRAPHVIZ_HEADER)?;

        visualize_instance(&self.tree, self.tree.get_root_id(), &self.metadata, output)?;

        writeln!(output, "}}")
    }
}

/// A Display wrapper struct to visualize an RbxSession as GraphViz DOT
/// source (converted to SVG elsewhere via `graphviz_to_svg`).
pub struct VisualizeRbxSession<'a>(pub &'a RbxSession);

impl<'a> fmt::Display for VisualizeRbxSession<'a> {
    /// Delegates to `VisualizeRbxTree` using the session's tree and metadata.
    fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
        writeln!(output, "{}", VisualizeRbxTree {
            tree: self.0.get_tree(),
            metadata: self.0.get_all_instance_metadata(),
        })
    }
}
// Emits one record-shaped node for the given instance plus an edge to each
// child, then recurses into the children.
fn visualize_instance(
    tree: &RbxTree,
    id: RbxId,
    metadata: &HashMap<RbxId, MetadataPerInstance>,
    output: &mut fmt::Formatter,
) -> fmt::Result {
    let instance = tree.get_instance(id).unwrap();
    let mut instance_label = format!("{}|{}|{}", instance.name, instance.class_name, id);

    // Append the client-facing metadata as a JSON segment when present.
    if let Some(session_metadata) = metadata.get(&id) {
        let metadata = PublicInstanceMetadata::from_session_metadata(session_metadata);
        instance_label.push('|');
        instance_label.push_str(&serde_json::to_string(&metadata).unwrap());
    }

    // Escape characters that are significant inside DOT record labels.
    instance_label = instance_label
        .replace("\"", "&quot;")
        .replace("{", "\\{")
        .replace("}", "\\}");

    writeln!(output, " \"{}\" [label=\"{}\"]", id, instance_label)?;

    for &child_id in instance.get_children_ids() {
        writeln!(output, " \"{}\" -> \"{}\"", id, child_id)?;
        visualize_instance(tree, child_id, metadata, output)?;
    }

    Ok(())
}
/// A Display wrapper struct to visualize an Imfs as GraphViz DOT source
/// (converted to SVG elsewhere via `graphviz_to_svg`).
pub struct VisualizeImfs<'a>(pub &'a Imfs);

impl<'a> fmt::Display for VisualizeImfs<'a> {
    /// Emits the shared DOT header, one subgraph per IMFS root, and the
    /// closing brace.
    fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
        writeln!(output, "{}", GRAPHVIZ_HEADER)?;

        for root_path in self.0.get_roots() {
            visualize_root_path(self.0, root_path, output)?;
        }

        writeln!(output, "}}")?;

        Ok(())
    }
}
/// Converts a path to a forward-slash-only string so node IDs are stable
/// across platforms. Panics if the path is not valid UTF-8.
fn normalize_name(path: &Path) -> String {
    let text = path.to_str().unwrap();
    text.replace('\\', "/")
}
// Emits the node for an IMFS root. Root nodes are labeled with their full
// (normalized) path; children recurse through visualize_path.
fn visualize_root_path(imfs: &Imfs, path: &Path, output: &mut fmt::Formatter) -> fmt::Result {
    let normalized_name = normalize_name(path);
    let item = imfs.get(path).unwrap();

    writeln!(output, " \"{}\"", normalized_name)?;

    match item {
        ImfsItem::File(_) => {},
        ImfsItem::Directory(directory) => {
            for child_path in &directory.children {
                writeln!(output, " \"{}\" -> \"{}\"", normalized_name, normalize_name(child_path))?;
                visualize_path(imfs, child_path, output)?;
            }
        },
    }

    Ok(())
}

// Emits a non-root IMFS node: the full path is the node ID, but only the
// final file name is shown as the label. Recurses into directories.
fn visualize_path(imfs: &Imfs, path: &Path, output: &mut fmt::Formatter) -> fmt::Result {
    let normalized_name = normalize_name(path);
    let short_name = path.file_name().unwrap().to_string_lossy();
    let item = imfs.get(path).unwrap();

    writeln!(output, " \"{}\" [label = \"{}\"]", normalized_name, short_name)?;

    match item {
        ImfsItem::File(_) => {},
        ImfsItem::Directory(directory) => {
            for child_path in &directory.children {
                writeln!(output, " \"{}\" -> \"{}\"", normalized_name, normalize_name(child_path))?;
                visualize_path(imfs, child_path, output)?;
            }
        },
    }

    Ok(())
}

View File

@@ -2,16 +2,15 @@
//! JSON. //! JSON.
use std::{ use std::{
borrow::Cow, collections::HashSet,
collections::{HashMap, HashSet},
sync::Arc, sync::Arc,
}; };
use futures::{ use futures::{
future::{self, IntoFuture}, future,
Future, Future,
sync::oneshot,
}; };
use hyper::{ use hyper::{
service::Service, service::Service,
header, header,
@@ -22,43 +21,15 @@ use hyper::{
Response, Response,
}; };
use serde::{Serialize, Deserialize}; use serde::{Serialize, Deserialize};
use rbx_dom_weak::{RbxId, RbxInstance}; use rbx_dom_weak::RbxId;
use crate::{ use crate::{
live_session::LiveSession, serve_session::ServeSession,
session_id::SessionId, session_id::SessionId,
snapshot_reconciler::InstanceChanges,
rbx_session::{MetadataPerInstance},
}; };
/// Contains the instance metadata relevant to Rojo clients. const SERVER_VERSION: &str = env!("CARGO_PKG_VERSION");
#[derive(Debug, Serialize, Deserialize)] const PROTOCOL_VERSION: u64 = 3;
#[serde(rename_all = "camelCase")]
pub struct PublicInstanceMetadata {
ignore_unknown_instances: bool,
}
impl PublicInstanceMetadata {
pub fn from_session_metadata(meta: &MetadataPerInstance) -> PublicInstanceMetadata {
PublicInstanceMetadata {
ignore_unknown_instances: meta.ignore_unknown_instances,
}
}
}
/// Used to attach metadata specific to Rojo to instances, which come from the
/// rbx_dom_weak crate.
///
/// Both fields are wrapped in Cow in order to make owned-vs-borrowed simpler
/// for tests.
#[derive(Debug, Serialize, Deserialize)]
pub struct InstanceWithMetadata<'a> {
#[serde(flatten)]
pub instance: Cow<'a, RbxInstance>,
#[serde(rename = "Metadata")]
pub metadata: Option<PublicInstanceMetadata>,
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@@ -67,23 +38,23 @@ pub struct ServerInfoResponse<'a> {
pub server_version: &'a str, pub server_version: &'a str,
pub protocol_version: u64, pub protocol_version: u64,
pub expected_place_ids: Option<HashSet<u64>>, pub expected_place_ids: Option<HashSet<u64>>,
pub root_instance_id: RbxId, // pub root_instance_id: RbxId,
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ReadResponse<'a> { pub struct ReadResponse {
pub session_id: SessionId, pub session_id: SessionId,
pub message_cursor: u32, // pub message_cursor: u32,
pub instances: HashMap<RbxId, InstanceWithMetadata<'a>>, // pub instances: HashMap<RbxId, InstanceWithMetadata<'a>>,
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SubscribeResponse<'a> { pub struct SubscribeResponse {
pub session_id: SessionId, pub session_id: SessionId,
pub message_cursor: u32, // pub message_cursor: u32,
pub messages: Cow<'a, [InstanceChanges]>, // pub messages: Cow<'a, [InstanceChanges]>,
} }
fn response_json<T: serde::Serialize>(value: T) -> Response<Body> { fn response_json<T: serde::Serialize>(value: T) -> Response<Body> {
@@ -91,11 +62,11 @@ fn response_json<T: serde::Serialize>(value: T) -> Response<Body> {
Ok(v) => v, Ok(v) => v,
Err(err) => { Err(err) => {
return Response::builder() return Response::builder()
.status(StatusCode::BAD_REQUEST) .status(StatusCode::INTERNAL_SERVER_ERROR)
.header(header::CONTENT_TYPE, "text/plain") .header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(err.to_string())) .body(Body::from(err.to_string()))
.unwrap(); .unwrap();
}, }
}; };
Response::builder() Response::builder()
@@ -105,8 +76,7 @@ fn response_json<T: serde::Serialize>(value: T) -> Response<Body> {
} }
pub struct ApiService { pub struct ApiService {
live_session: Arc<LiveSession>, serve_session: Arc<ServeSession>,
server_version: &'static str,
} }
impl Service for ApiService { impl Service for ApiService {
@@ -135,24 +105,19 @@ impl Service for ApiService {
} }
impl ApiService { impl ApiService {
pub fn new(live_session: Arc<LiveSession>) -> ApiService { pub fn new(serve_session: Arc<ServeSession>) -> ApiService {
ApiService { ApiService {
live_session, serve_session,
server_version: env!("CARGO_PKG_VERSION"),
} }
} }
/// Get a summary of information about the server /// Get a summary of information about the server
fn handle_api_rojo(&self) -> Response<Body> { fn handle_api_rojo(&self) -> Response<Body> {
let rbx_session = self.live_session.rbx_session.lock().unwrap();
let tree = rbx_session.get_tree();
response_json(&ServerInfoResponse { response_json(&ServerInfoResponse {
server_version: self.server_version, server_version: SERVER_VERSION,
protocol_version: 2, protocol_version: PROTOCOL_VERSION,
session_id: self.live_session.session_id(), session_id: self.serve_session.session_id(),
expected_place_ids: self.live_session.serve_place_ids().clone(), expected_place_ids: self.serve_session.serve_place_ids().map(Clone::clone),
root_instance_id: tree.get_root_id(),
}) })
} }
@@ -160,7 +125,7 @@ impl ApiService {
/// there weren't any, subscribe to receive any new messages. /// there weren't any, subscribe to receive any new messages.
fn handle_api_subscribe(&self, request: Request<Body>) -> <ApiService as Service>::Future { fn handle_api_subscribe(&self, request: Request<Body>) -> <ApiService as Service>::Future {
let argument = &request.uri().path()["/api/subscribe/".len()..]; let argument = &request.uri().path()["/api/subscribe/".len()..];
let cursor: u32 = match argument.parse() { let _cursor: u32 = match argument.parse() {
Ok(v) => v, Ok(v) => v,
Err(err) => { Err(err) => {
return Box::new(future::ok(Response::builder() return Box::new(future::ok(Response::builder()
@@ -171,28 +136,9 @@ impl ApiService {
}, },
}; };
let message_queue = Arc::clone(&self.live_session.message_queue); Box::new(future::ok(response_json(SubscribeResponse {
let session_id = self.live_session.session_id(); session_id: self.serve_session.session_id(),
})))
let (tx, rx) = oneshot::channel();
message_queue.subscribe(cursor, tx);
let result = rx.into_future()
.and_then(move |(new_cursor, new_messages)| {
Box::new(future::ok(response_json(SubscribeResponse {
session_id: session_id,
messages: Cow::Owned(new_messages),
message_cursor: new_cursor,
})))
})
.or_else(|e| {
Box::new(future::ok(Response::builder()
.status(500)
.body(Body::from(format!("Internal Error: {:?}", e)))
.unwrap()))
});
Box::new(result)
} }
fn handle_api_read(&self, request: Request<Body>) -> Response<Body> { fn handle_api_read(&self, request: Request<Body>) -> Response<Body> {
@@ -202,9 +148,7 @@ impl ApiService {
.map(RbxId::parse_str) .map(RbxId::parse_str)
.collect(); .collect();
let message_queue = Arc::clone(&self.live_session.message_queue); let _requested_ids = match requested_ids {
let requested_ids = match requested_ids {
Some(id) => id, Some(id) => id,
None => { None => {
return Response::builder() return Response::builder()
@@ -215,39 +159,8 @@ impl ApiService {
}, },
}; };
let rbx_session = self.live_session.rbx_session.lock().unwrap(); response_json(ReadResponse {
let tree = rbx_session.get_tree(); session_id: self.serve_session.session_id(),
let message_cursor = message_queue.get_message_cursor();
let mut instances = HashMap::new();
for &requested_id in &requested_ids {
if let Some(instance) = tree.get_instance(requested_id) {
let metadata = rbx_session.get_instance_metadata(requested_id)
.map(PublicInstanceMetadata::from_session_metadata);
instances.insert(instance.get_id(), InstanceWithMetadata {
instance: Cow::Borrowed(instance),
metadata,
});
for descendant in tree.descendants(requested_id) {
let descendant_meta = rbx_session.get_instance_metadata(descendant.get_id())
.map(PublicInstanceMetadata::from_session_metadata);
instances.insert(descendant.get_id(), InstanceWithMetadata {
instance: Cow::Borrowed(descendant),
metadata: descendant_meta,
});
}
}
}
response_json(&ReadResponse {
session_id: self.live_session.session_id(),
message_cursor,
instances,
}) })
} }
} }

View File

@@ -14,16 +14,14 @@ use hyper::{
}; };
use ritz::html; use ritz::html;
use crate::{ use crate::serve_session::ServeSession;
live_session::LiveSession,
visualize::{VisualizeRbxSession, VisualizeImfs, graphviz_to_svg},
};
const SERVER_VERSION: &str = env!("CARGO_PKG_VERSION");
static HOME_CSS: &str = include_str!("../../assets/index.css"); static HOME_CSS: &str = include_str!("../../assets/index.css");
pub struct InterfaceService { pub struct InterfaceService {
live_session: Arc<LiveSession>, #[allow(unused)] // TODO: Fill out interface service
server_version: &'static str, serve_session: Arc<ServeSession>,
} }
impl Service for InterfaceService { impl Service for InterfaceService {
@@ -48,10 +46,9 @@ impl Service for InterfaceService {
} }
impl InterfaceService { impl InterfaceService {
pub fn new(live_session: Arc<LiveSession>) -> InterfaceService { pub fn new(serve_session: Arc<ServeSession>) -> InterfaceService {
InterfaceService { InterfaceService {
live_session, serve_session,
server_version: env!("CARGO_PKG_VERSION"),
} }
} }
@@ -71,7 +68,7 @@ impl InterfaceService {
"Rojo Live Sync is up and running!" "Rojo Live Sync is up and running!"
</h1> </h1>
<h2 class="subtitle"> <h2 class="subtitle">
"Version " { self.server_version } "Version " { SERVER_VERSION }
</h2> </h2>
<a class="docs" href="https://lpghatguy.github.io/rojo"> <a class="docs" href="https://lpghatguy.github.io/rojo">
"Rojo Documentation" "Rojo Documentation"
@@ -88,34 +85,16 @@ impl InterfaceService {
} }
fn handle_visualize_rbx(&self) -> Response<Body> { fn handle_visualize_rbx(&self) -> Response<Body> {
let rbx_session = self.live_session.rbx_session.lock().unwrap(); Response::builder()
let dot_source = format!("{}", VisualizeRbxSession(&rbx_session)); .header(header::CONTENT_TYPE, "text/plain")
.body(Body::from("TODO: /visualize/rbx"))
match graphviz_to_svg(&dot_source) { .unwrap()
Some(svg) => Response::builder()
.header(header::CONTENT_TYPE, "image/svg+xml")
.body(Body::from(svg))
.unwrap(),
None => Response::builder()
.header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(dot_source))
.unwrap(),
}
} }
fn handle_visualize_imfs(&self) -> Response<Body> { fn handle_visualize_imfs(&self) -> Response<Body> {
let imfs = self.live_session.imfs.lock().unwrap(); Response::builder()
let dot_source = format!("{}", VisualizeImfs(&imfs)); .header(header::CONTENT_TYPE, "text/plain")
.body(Body::from("TODO: /visualize/imfs"))
match graphviz_to_svg(&dot_source) { .unwrap()
Some(svg) => Response::builder()
.header(header::CONTENT_TYPE, "image/svg+xml")
.body(Body::from(svg))
.unwrap(),
None => Response::builder()
.header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(dot_source))
.unwrap(),
}
} }
} }

View File

@@ -1,6 +1,4 @@
// TODO: This module needs to be public for visualize, we should move mod api;
// PublicInstanceMetadata and switch this private!
pub mod api;
mod interface; mod interface;
use std::sync::Arc; use std::sync::Arc;
@@ -18,9 +16,7 @@ use hyper::{
Server, Server,
}; };
use crate::{ use crate::serve_session::ServeSession;
live_session::LiveSession,
};
use self::{ use self::{
api::ApiService, api::ApiService,
@@ -50,22 +46,22 @@ impl Service for RootService {
} }
impl RootService { impl RootService {
pub fn new(live_session: Arc<LiveSession>) -> RootService { pub fn new(serve_session: Arc<ServeSession>) -> RootService {
RootService { RootService {
api: ApiService::new(Arc::clone(&live_session)), api: ApiService::new(Arc::clone(&serve_session)),
interface: InterfaceService::new(Arc::clone(&live_session)), interface: InterfaceService::new(Arc::clone(&serve_session)),
} }
} }
} }
pub struct LiveServer { pub struct LiveServer {
live_session: Arc<LiveSession>, serve_session: Arc<ServeSession>,
} }
impl LiveServer { impl LiveServer {
pub fn new(live_session: Arc<LiveSession>) -> LiveServer { pub fn new(serve_session: Arc<ServeSession>) -> LiveServer {
LiveServer { LiveServer {
live_session, serve_session,
} }
} }
@@ -75,7 +71,7 @@ impl LiveServer {
let server = Server::bind(&address) let server = Server::bind(&address)
.serve(move || { .serve(move || {
let service: FutureResult<_, hyper::Error> = let service: FutureResult<_, hyper::Error> =
future::ok(RootService::new(Arc::clone(&self.live_session))); future::ok(RootService::new(Arc::clone(&self.serve_session)));
service service
}) })
.map_err(|e| eprintln!("Server error: {}", e)); .map_err(|e| eprintln!("Server error: {}", e));

View File

@@ -1,350 +0,0 @@
use std::{
collections::{HashMap, HashSet, BTreeSet},
fs,
path::PathBuf,
};
use failure::Error;
use tempfile::{TempDir, tempdir};
use librojo::{
imfs::{Imfs, ImfsItem, ImfsFile, ImfsDirectory},
};
// Synthetic filesystem events used to drive the IMFS in these tests.
#[allow(unused)]
enum FsEvent {
    Created(PathBuf),
    Updated(PathBuf),
    Removed(PathBuf),
    Moved(PathBuf, PathBuf),
}

// Replays a sequence of events against the IMFS, stopping at the first error.
fn send_events(imfs: &mut Imfs, events: &[FsEvent]) -> Result<(), Error> {
    for event in events {
        match event {
            FsEvent::Created(path) => imfs.path_created(path)?,
            FsEvent::Updated(path) => imfs.path_updated(path)?,
            FsEvent::Removed(path) => imfs.path_removed(path)?,
            FsEvent::Moved(from, to) => imfs.path_moved(from, to)?,
        }
    }

    Ok(())
}
// Expected state of an Imfs: its set of root paths plus every item by path.
#[derive(Debug, Clone, PartialEq)]
struct ExpectedImfs {
    roots: HashSet<PathBuf>,
    items: HashMap<PathBuf, ImfsItem>,
}

// Paths created by base_tree, handed to tests so they can mutate/inspect them.
struct TestResources {
    foo_path: PathBuf,
    bar_path: PathBuf,
    baz_path: PathBuf,
}

// Asserts that the real IMFS matches the expected one, dumping both item maps
// as JSON on mismatch for easier diffing.
fn check_expected(real: &Imfs, expected: &ExpectedImfs) {
    assert_eq!(real.get_roots(), &expected.roots);

    let real_items = real.get_items();
    if real_items != &expected.items {
        let real_str = serde_json::to_string(real_items).unwrap();
        let expected_str = serde_json::to_string(&expected.items).unwrap();
        panic!("Items differed!\nReal:\n{}\nExpected:\n{}\n", real_str, expected_str);
    }
}
// Builds the on-disk fixture:
//   <root>/
//     foo/
//       baz.txt ("baz")
//     bar.txt ("bar")
// plus an Imfs rooted at it and the matching hand-built expected state.
fn base_tree() -> Result<(TempDir, Imfs, ExpectedImfs, TestResources), Error> {
    let root = tempdir()?;
    let foo_path = root.path().join("foo");
    let bar_path = root.path().join("bar.txt");
    let baz_path = foo_path.join("baz.txt");

    let resources = TestResources {
        foo_path: foo_path.clone(),
        bar_path: bar_path.clone(),
        baz_path: baz_path.clone(),
    };

    // Create the real files before the Imfs reads the root.
    fs::create_dir(&foo_path)?;
    fs::write(&bar_path, b"bar")?;
    fs::write(&baz_path, b"baz")?;

    let mut imfs = Imfs::new();
    imfs.add_root(root.path())?;

    let mut expected_roots = HashSet::new();
    expected_roots.insert(root.path().to_path_buf());

    // Hand-built mirror of the directory structure above.
    let root_item = {
        let mut children = BTreeSet::new();
        children.insert(foo_path.clone());
        children.insert(bar_path.clone());

        ImfsItem::Directory(ImfsDirectory {
            path: root.path().to_path_buf(),
            children,
        })
    };

    let foo_item = {
        let mut children = BTreeSet::new();
        children.insert(baz_path.clone());

        ImfsItem::Directory(ImfsDirectory {
            path: foo_path.clone(),
            children,
        })
    };

    let bar_item = ImfsItem::File(ImfsFile {
        path: bar_path.clone(),
        contents: b"bar".to_vec(),
    });

    let baz_item = ImfsItem::File(ImfsFile {
        path: baz_path.clone(),
        contents: b"baz".to_vec(),
    });

    let mut expected_items = HashMap::new();
    expected_items.insert(root.path().to_path_buf(), root_item);
    expected_items.insert(foo_path.clone(), foo_item);
    expected_items.insert(bar_path.clone(), bar_item);
    expected_items.insert(baz_path.clone(), baz_item);

    let expected_imfs = ExpectedImfs {
        roots: expected_roots,
        items: expected_items,
    };

    // The TempDir must be returned so the directory outlives the test body.
    Ok((root, imfs, expected_imfs, resources))
}
#[test]
fn initial_read() -> Result<(), Error> {
    // With no events delivered, the Imfs built by base_tree should already
    // match the expected state.
    let (_root, imfs, expected_imfs, _resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);

    Ok(())
}

#[test]
fn adding_files() -> Result<(), Error> {
    let (root, mut imfs, mut expected_imfs, resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);

    // Create one file at the root and one inside foo/ on disk.
    let add_one_path = root.path().join("add_one.txt");
    let add_two_path = resources.foo_path.join("add_two.txt");

    fs::write(&add_one_path, b"add_one")?;
    fs::write(&add_two_path, b"add_two")?;

    // Update the expected state: new directory children plus the file items.
    match expected_imfs.items.get_mut(root.path()) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.insert(add_one_path.clone());
        },
        _ => unreachable!(),
    }

    match expected_imfs.items.get_mut(&resources.foo_path) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.insert(add_two_path.clone());
        },
        _ => unreachable!(),
    }

    expected_imfs.items.insert(add_one_path.clone(), ImfsItem::File(ImfsFile {
        path: add_one_path.clone(),
        contents: b"add_one".to_vec(),
    }));

    expected_imfs.items.insert(add_two_path.clone(), ImfsItem::File(ImfsFile {
        path: add_two_path.clone(),
        contents: b"add_two".to_vec(),
    }));

    // Deliver the create events and verify the Imfs catches up.
    imfs.path_created(&add_one_path)?;
    imfs.path_created(&add_two_path)?;

    check_expected(&imfs, &expected_imfs);

    Ok(())
}
#[test]
fn adding_folder() -> Result<(), Error> {
    let (root, imfs, mut expected_imfs, _resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);

    // Create a new folder containing two files on disk.
    let folder_path = root.path().join("folder");
    let file1_path = folder_path.join("file1.txt");
    let file2_path = folder_path.join("file2.txt");

    fs::create_dir(&folder_path)?;
    fs::write(&file1_path, b"file1")?;
    fs::write(&file2_path, b"file2")?;

    // Update the expected state to include the folder and both files.
    match expected_imfs.items.get_mut(root.path()) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.insert(folder_path.clone());
        },
        _ => unreachable!(),
    }

    let folder_item = {
        let mut children = BTreeSet::new();
        children.insert(file1_path.clone());
        children.insert(file2_path.clone());

        ImfsItem::Directory(ImfsDirectory {
            path: folder_path.clone(),
            children,
        })
    };

    expected_imfs.items.insert(folder_path.clone(), folder_item);

    let file1_item = ImfsItem::File(ImfsFile {
        path: file1_path.clone(),
        contents: b"file1".to_vec(),
    });

    expected_imfs.items.insert(file1_path.clone(), file1_item);

    let file2_item = ImfsItem::File(ImfsFile {
        path: file2_path.clone(),
        contents: b"file2".to_vec(),
    });

    expected_imfs.items.insert(file2_path.clone(), file2_item);

    // File watchers may deliver events for a new directory in several
    // orders (or collapse them); the Imfs must converge for all of them.
    let possible_event_sequences = vec![
        vec![
            FsEvent::Created(folder_path.clone())
        ],
        vec![
            FsEvent::Created(folder_path.clone()),
            FsEvent::Created(file1_path.clone()),
            FsEvent::Created(file2_path.clone()),
        ],
        vec![
            FsEvent::Created(file1_path.clone()),
            FsEvent::Created(file2_path.clone()),
            FsEvent::Created(folder_path.clone()),
        ],
        vec![
            FsEvent::Created(file1_path.clone()),
            FsEvent::Created(folder_path.clone()),
            FsEvent::Created(file2_path.clone()),
        ],
    ];

    // Each sequence runs against a fresh clone of the pre-event Imfs.
    for events in &possible_event_sequences {
        let mut imfs = imfs.clone();
        send_events(&mut imfs, events)?;
        check_expected(&imfs, &expected_imfs);
    }

    Ok(())
}
#[test]
fn updating_files() -> Result<(), Error> {
    // Rewrites two existing files on disk, signals the IMFS via path_updated,
    // and checks that the cached contents reflect the new bytes.
    let (_root, mut imfs, mut expected_imfs, resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);
    fs::write(&resources.bar_path, b"bar updated")?;
    fs::write(&resources.baz_path, b"baz updated")?;
    imfs.path_updated(&resources.bar_path)?;
    imfs.path_updated(&resources.baz_path)?;
    let bar_updated_item = ImfsItem::File(ImfsFile {
        path: resources.bar_path.clone(),
        contents: b"bar updated".to_vec(),
    });
    let baz_updated_item = ImfsItem::File(ImfsFile {
        path: resources.baz_path.clone(),
        contents: b"baz updated".to_vec(),
    });
    // Replace the stale entries in the expected model before comparing.
    expected_imfs.items.insert(resources.bar_path.clone(), bar_updated_item);
    expected_imfs.items.insert(resources.baz_path.clone(), baz_updated_item);
    check_expected(&imfs, &expected_imfs);
    Ok(())
}
#[test]
fn removing_file() -> Result<(), Error> {
    // Deletes a file on disk, signals the IMFS via path_removed, and checks
    // that both the entry and its parent's child link are gone.
    let (root, mut imfs, mut expected_imfs, resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);
    fs::remove_file(&resources.bar_path)?;
    imfs.path_removed(&resources.bar_path)?;
    // Drop the child link from the root directory in the expected model.
    match expected_imfs.items.get_mut(root.path()) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.remove(&resources.bar_path);
        },
        _ => unreachable!(),
    }
    expected_imfs.items.remove(&resources.bar_path);
    check_expected(&imfs, &expected_imfs);
    Ok(())
}
#[test]
fn removing_folder() -> Result<(), Error> {
    // Deletes a whole folder tree and verifies the IMFS converges for every
    // plausible ordering of the resulting removal events, including the case
    // where only the folder's own removal is reported.
    let (root, imfs, mut expected_imfs, resources) = base_tree()?;
    check_expected(&imfs, &expected_imfs);
    fs::remove_dir_all(&resources.foo_path)?;
    match expected_imfs.items.get_mut(root.path()) {
        Some(ImfsItem::Directory(directory)) => {
            directory.children.remove(&resources.foo_path);
        },
        _ => unreachable!(),
    }
    // Both the folder and its contained file must disappear from the model.
    expected_imfs.items.remove(&resources.foo_path);
    expected_imfs.items.remove(&resources.baz_path);
    let possible_event_sequences = vec![
        vec![
            FsEvent::Removed(resources.foo_path.clone()),
        ],
        vec![
            FsEvent::Removed(resources.baz_path.clone()),
            FsEvent::Removed(resources.foo_path.clone()),
        ],
        vec![
            FsEvent::Removed(resources.foo_path.clone()),
            FsEvent::Removed(resources.baz_path.clone()),
        ],
    ];
    // Each sequence runs against a fresh clone so the runs stay independent.
    for events in &possible_event_sequences {
        let mut imfs = imfs.clone();
        send_events(&mut imfs, events)?;
        check_expected(&imfs, &expected_imfs);
    }
    Ok(())
}

View File

@@ -1,55 +0,0 @@
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use librojo::{
live_session::LiveSession,
project::Project,
};
lazy_static::lazy_static! {
static ref TEST_PROJECTS_ROOT: PathBuf = {
Path::new(env!("CARGO_MANIFEST_DIR")).join("../test-projects")
};
}
#[test]
fn bad_json_model() {
    // Starting a live session on this intentionally-broken project must fail.
    let project_path = TEST_PROJECTS_ROOT.join("bad_json_model");
    let project = Project::load_fuzzy(&project_path)
        .expect("Project file didn't load");
    let session = LiveSession::new(Arc::new(project));
    assert!(session.is_err(), "Project should not have succeeded");
}
#[test]
fn bad_meta_lua_classname() {
    // Starting a live session on this intentionally-broken project must fail.
    let project_path = TEST_PROJECTS_ROOT.join("bad_meta_lua_classname");
    let project = Project::load_fuzzy(&project_path)
        .expect("Project file didn't load");
    let session = LiveSession::new(Arc::new(project));
    assert!(session.is_err(), "Project should not have succeeded");
}
#[test]
fn bad_meta_rbxmx_properties() {
    // Starting a live session on this intentionally-broken project must fail.
    let project_path = TEST_PROJECTS_ROOT.join("bad_meta_rbxmx_properties");
    let project = Project::load_fuzzy(&project_path)
        .expect("Project file didn't load");
    let session = LiveSession::new(Arc::new(project));
    assert!(session.is_err(), "Project should not have succeeded");
}
#[test]
fn bad_missing_files() {
    // Starting a live session on this intentionally-broken project must fail.
    let project_path = TEST_PROJECTS_ROOT.join("bad_missing_files");
    let project = Project::load_fuzzy(&project_path)
        .expect("Project file didn't load");
    let session = LiveSession::new(Arc::new(project));
    assert!(session.is_err(), "Project should not have succeeded");
}

View File

@@ -1,111 +0,0 @@
mod test_util;
use std::collections::HashMap;
use rbx_dom_weak::{RbxTree, RbxInstanceProperties};
use librojo::{
snapshot_reconciler::{RbxSnapshotInstance, reconcile_subtree},
};
use test_util::tree::trees_equal;
#[test]
fn patch_communicativity() {
    // Verifies that applying patch A then the combined patch yields the same
    // tree as applying patch B then the combined patch — i.e. the final
    // reconcile converges regardless of intermediate order.
    // NOTE(review): "communicativity" looks like a typo for "commutativity".
    let base_tree = RbxTree::new(RbxInstanceProperties {
        name: "DataModel".into(),
        class_name: "DataModel".into(),
        properties: HashMap::new(),
    });
    // Patch A adds only Child-A under the root.
    let patch_a = RbxSnapshotInstance {
        name: "DataModel".into(),
        class_name: "DataModel".into(),
        children: vec![
            RbxSnapshotInstance {
                name: "Child-A".into(),
                class_name: "Folder".into(),
                ..Default::default()
            },
        ],
        ..Default::default()
    };
    // Patch B adds only Child-B under the root.
    let patch_b = RbxSnapshotInstance {
        name: "DataModel".into(),
        class_name: "DataModel".into(),
        children: vec![
            RbxSnapshotInstance {
                name: "Child-B".into(),
                class_name: "Folder".into(),
                ..Default::default()
            },
        ],
        ..Default::default()
    };
    // The combined patch contains both children; applying it last should
    // erase any difference introduced by the intermediate patch.
    let patch_combined = RbxSnapshotInstance {
        name: "DataModel".into(),
        class_name: "DataModel".into(),
        children: vec![
            RbxSnapshotInstance {
                name: "Child-A".into(),
                class_name: "Folder".into(),
                ..Default::default()
            },
            RbxSnapshotInstance {
                name: "Child-B".into(),
                class_name: "Folder".into(),
                ..Default::default()
            },
        ],
        ..Default::default()
    };
    let root_id = base_tree.get_root_id();
    // Path 1: A, then combined.
    let mut tree_a = base_tree.clone();
    reconcile_subtree(
        &mut tree_a,
        root_id,
        &patch_a,
        &mut Default::default(),
        &mut Default::default(),
        &mut Default::default(),
    );
    reconcile_subtree(
        &mut tree_a,
        root_id,
        &patch_combined,
        &mut Default::default(),
        &mut Default::default(),
        &mut Default::default(),
    );
    // Path 2: B, then combined.
    let mut tree_b = base_tree.clone();
    reconcile_subtree(
        &mut tree_b,
        root_id,
        &patch_b,
        &mut Default::default(),
        &mut Default::default(),
        &mut Default::default(),
    );
    reconcile_subtree(
        &mut tree_b,
        root_id,
        &patch_combined,
        &mut Default::default(),
        &mut Default::default(),
        &mut Default::default(),
    );
    // Both paths must produce identical trees (IDs are ignored by the
    // comparison helper).
    match trees_equal(&tree_a, &tree_b) {
        Ok(_) => {}
        Err(e) => panic!("{}", e),
    }
}

View File

@@ -1,68 +0,0 @@
mod test_util;
use std::path::Path;
use librojo::{
imfs::Imfs,
project::Project,
rbx_snapshot::{SnapshotContext, snapshot_project_tree},
};
use crate::test_util::{
snapshot::*,
};
// Generates one `#[test]` per test-project name. Each generated test is
// named `snapshot_<name>` and runs the shared snapshot harness against the
// matching folder under ../test-projects. `paste::item!` splices the project
// name into the test function's identifier.
macro_rules! generate_snapshot_tests {
    ($($name: ident),*) => {
        $(
            paste::item! {
                #[test]
                fn [<snapshot_ $name>]() {
                    let _ = env_logger::try_init();
                    let tests_folder = Path::new(env!("CARGO_MANIFEST_DIR")).join("../test-projects");
                    let project_folder = tests_folder.join(stringify!($name));
                    run_snapshot_test(&project_folder);
                }
            }
        )*
    };
}
// One snapshot test per checked-in test project.
generate_snapshot_tests!(
    empty,
    json_model,
    meta_files,
    multi_partition_game,
    nested_partitions,
    single_partition_game,
    single_partition_model,
    transmute_partition
);
// Shared harness for the generated snapshot tests: loads the project at
// `path`, snapshots its tree, strips machine-specific paths, then compares
// against the stored expected snapshot — or writes one if none exists yet
// (first run bootstraps the fixture).
fn run_snapshot_test(path: &Path) {
    println!("Running snapshot from project: {}", path.display());
    let project = Project::load_fuzzy(path)
        .expect("Couldn't load project file for snapshot test");
    let mut imfs = Imfs::new();
    imfs.add_roots_from_project(&project)
        .expect("Could not add IMFS roots to snapshot project");
    let context = SnapshotContext {
        plugin_context: None,
    };
    let mut snapshot = snapshot_project_tree(&context, &imfs, &project)
        .expect("Could not generate snapshot for snapshot test");
    // Absolute paths would make the fixture machine-dependent; anonymize
    // them relative to the project folder before comparing/saving.
    if let Some(snapshot) = snapshot.as_mut() {
        anonymize_snapshot(path, snapshot);
    }
    match read_expected_snapshot(path) {
        Some(expected_snapshot) => assert_eq!(snapshot, expected_snapshot),
        None => write_expected_snapshot(path, &snapshot),
    }
}

View File

@@ -1,36 +0,0 @@
#![allow(dead_code)]
use std::fs::{create_dir, copy};
use std::path::Path;
use std::io;
use walkdir::WalkDir;
pub mod snapshot;
pub mod tree;
pub fn copy_recursive(from: &Path, to: &Path) -> io::Result<()> {
for entry in WalkDir::new(from) {
let entry = entry?;
let path = entry.path();
let new_path = to.join(path.strip_prefix(from).unwrap());
let file_type = entry.file_type();
if file_type.is_dir() {
match create_dir(new_path) {
Ok(_) => {},
Err(err) => match err.kind() {
io::ErrorKind::AlreadyExists => {},
_ => panic!(err),
}
}
} else if file_type.is_file() {
copy(path, new_path)?;
} else {
unimplemented!("no symlinks please");
}
}
Ok(())
}

View File

@@ -1,79 +0,0 @@
use std::{
fs::{self, File},
path::{Path, PathBuf},
};
use librojo::{
project::ProjectNode,
snapshot_reconciler::RbxSnapshotInstance,
rbx_session::MetadataPerInstance,
};
const SNAPSHOT_EXPECTED_NAME: &str = "expected-snapshot.json";
/// Snapshots contain absolute paths, which simplifies much of Rojo.
///
/// For saving snapshots to the disk, we should strip off the project folder
/// path to make them machine-independent. This doesn't work for paths that fall
/// outside of the project folder, but that's okay here.
///
/// We also need to sort children, since Rojo tends to enumerate the filesystem
/// in an unpredictable order.
pub fn anonymize_snapshot(project_folder_path: &Path, snapshot: &mut RbxSnapshotInstance) {
    anonymize_metadata(project_folder_path, &mut snapshot.metadata);
    // Filesystem enumeration order is unpredictable, so children are sorted
    // to make snapshots stable. partial_cmp is unwrapped here; presumably
    // snapshot ordering is total in practice — panics would surface in tests.
    snapshot.children.sort_by(|a, b| a.partial_cmp(b).unwrap());
    // Recurse so the entire subtree is anonymized and sorted.
    for child in snapshot.children.iter_mut() {
        anonymize_snapshot(project_folder_path, child);
    }
}
/// Strips machine-specific path prefixes from one instance's metadata.
pub fn anonymize_metadata(project_folder_path: &Path, metadata: &mut MetadataPerInstance) {
    // Rewrite the source path relative to the project folder, if set.
    if let Some(path) = metadata.source_path.as_mut() {
        *path = anonymize_path(project_folder_path, path);
    }
    // Recurse into the attached project definition, if set.
    if let Some((_, project_node)) = metadata.project_definition.as_mut() {
        anonymize_project_node(project_folder_path, project_node);
    }
}
/// Strips machine-specific path prefixes from a project node and all of its
/// descendants.
pub fn anonymize_project_node(project_folder_path: &Path, project_node: &mut ProjectNode) {
    if let Some(path) = project_node.path.as_mut() {
        *path = anonymize_path(project_folder_path, path);
    }
    for child_node in project_node.children.values_mut() {
        anonymize_project_node(project_folder_path, child_node);
    }
}
/// Makes `path` machine-independent by stripping the project folder prefix
/// from absolute paths. Relative paths are returned unchanged. Panics if an
/// absolute path does not live under the project folder.
pub fn anonymize_path(project_folder_path: &Path, path: &Path) -> PathBuf {
    // Relative paths carry no machine-specific prefix; pass them through.
    if !path.is_absolute() {
        return path.to_path_buf();
    }
    path.strip_prefix(project_folder_path)
        .expect("Could not anonymize absolute path")
        .to_path_buf()
}
// Reads the stored expected snapshot for the project at `path`.
// Returns None when no fixture file exists yet (outer Option); the inner
// Option mirrors the serialized snapshot, which may itself be null.
pub fn read_expected_snapshot(path: &Path) -> Option<Option<RbxSnapshotInstance<'static>>> {
    let contents = fs::read(path.join(SNAPSHOT_EXPECTED_NAME)).ok()?;
    let snapshot: Option<RbxSnapshotInstance<'static>> = serde_json::from_slice(&contents)
        .expect("Could not deserialize snapshot");
    Some(snapshot)
}
// Writes `snapshot` as the pretty-printed expected fixture for the project
// at `path`, overwriting any existing fixture file.
pub fn write_expected_snapshot(path: &Path, snapshot: &Option<RbxSnapshotInstance>) {
    let mut file = File::create(path.join(SNAPSHOT_EXPECTED_NAME))
        .expect("Could not open file to write snapshot");
    serde_json::to_writer_pretty(&mut file, snapshot)
        .expect("Could not serialize snapshot to file");
}

View File

@@ -1,351 +0,0 @@
//! Defines a mechanism to compare two RbxTree objects and generate a useful
//! diff if they aren't the same. These methods ignore IDs, which are randomly
//! generated whenever a tree is constructed anyways. This makes matching up
//! pairs of instances that should be the same potentially difficult.
//!
//! It relies on a couple different ideas:
//! - Instances with the same name and class name are matched as the same
//! instance. See basic_equal for this logic
//! - A path of period-delimited names (like Roblox's GetFullName) should be
//! enough to debug most issues. If it isn't, we can do something fun like
//! generate GraphViz graphs.
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
fmt,
fs::{self, File},
hash::Hash,
path::{Path, PathBuf},
};
use log::error;
use serde::{Serialize, Deserialize};
use rbx_dom_weak::{RbxId, RbxTree};
use librojo::{
rbx_session::MetadataPerInstance,
live_session::LiveSession,
visualize::{VisualizeRbxTree, graphviz_to_svg},
};
use super::snapshot::anonymize_metadata;
/// Marks a 'step' in the test, which will snapshot the session's current
/// RbxTree object and compare it against the saved snapshot if it exists.
/// Marks a 'step' in the test, which will snapshot the session's current
/// RbxTree object and compare it against the saved snapshot if it exists.
///
/// On mismatch, writes expected/actual visualizations (SVG when GraphViz is
/// available, raw .gv otherwise) into ./failed-snapshots and panics. On a
/// first run with no saved snapshot, the current tree is written as the
/// fixture instead.
pub fn tree_step(step: &str, live_session: &LiveSession, source_path: &Path) {
    let rbx_session = live_session.rbx_session.lock().unwrap();
    let tree = rbx_session.get_tree();
    let project_folder = live_session.root_project().folder_location();
    // Anonymize per-instance metadata so comparisons are machine-independent.
    let metadata = rbx_session.get_all_instance_metadata()
        .iter()
        .map(|(key, meta)| {
            let mut meta = meta.clone();
            anonymize_metadata(project_folder, &mut meta);
            (*key, meta)
        })
        .collect();
    let tree_with_metadata = TreeWithMetadata {
        tree: Cow::Borrowed(&tree),
        metadata: Cow::Owned(metadata),
    };
    match read_tree_by_name(source_path, step) {
        Some(expected) => match trees_and_metadata_equal(&expected, &tree_with_metadata) {
            Ok(_) => {}
            Err(e) => {
                error!("Trees at step '{}' were not equal.\n{}", step, e);
                // Render both trees to GraphViz source for debugging output.
                let expected_gv = format!("{}", VisualizeRbxTree {
                    tree: &expected.tree,
                    metadata: &expected.metadata,
                });
                let actual_gv = format!("{}", VisualizeRbxTree {
                    tree: &tree_with_metadata.tree,
                    metadata: &tree_with_metadata.metadata,
                });
                let output_dir = PathBuf::from("failed-snapshots");
                fs::create_dir_all(&output_dir)
                    .expect("Could not create failed-snapshots directory");
                let expected_basename = format!("{}-{}-expected", live_session.root_project().name, step);
                let actual_basename = format!("{}-{}-actual", live_session.root_project().name, step);
                let mut expected_out = output_dir.join(expected_basename);
                let mut actual_out = output_dir.join(actual_basename);
                // Prefer rendered SVGs; fall back to raw .gv sources when
                // GraphViz isn't installed (graphviz_to_svg returns None).
                match (graphviz_to_svg(&expected_gv), graphviz_to_svg(&actual_gv)) {
                    (Some(expected_svg), Some(actual_svg)) => {
                        expected_out.set_extension("svg");
                        actual_out.set_extension("svg");
                        fs::write(&expected_out, expected_svg)
                            .expect("Couldn't write expected SVG");
                        fs::write(&actual_out, actual_svg)
                            .expect("Couldn't write actual SVG");
                    }
                    _ => {
                        expected_out.set_extension("gv");
                        actual_out.set_extension("gv");
                        fs::write(&expected_out, expected_gv)
                            .expect("Couldn't write expected GV");
                        fs::write(&actual_out, actual_gv)
                            .expect("Couldn't write actual GV");
                    }
                }
                error!("Output at {} and {}", expected_out.display(), actual_out.display());
                panic!("Tree mismatch at step '{}'", step);
            }
        }
        None => {
            // No fixture yet: bootstrap one from the current tree.
            write_tree_by_name(source_path, step, &tree_with_metadata);
        }
    }
}
/// Serde default helper: produces an owned, empty metadata map.
fn new_cow_map<K: Clone + Eq + Hash, V: Clone>() -> Cow<'static, HashMap<K, V>> {
    let empty = HashMap::new();
    Cow::Owned(empty)
}
/// An RbxTree paired with anonymized per-instance metadata, serialized as a
/// single flat JSON object for tree fixtures.
#[derive(Debug, Serialize, Deserialize)]
struct TreeWithMetadata<'a> {
    // Flattened so the tree's own fields appear at the top level of the
    // serialized fixture.
    #[serde(flatten)]
    pub tree: Cow<'a, RbxTree>,
    // Defaults to an empty map so older fixtures without metadata still load.
    #[serde(default = "new_cow_map")]
    pub metadata: Cow<'a, HashMap<RbxId, MetadataPerInstance>>,
}
// Loads the tree fixture `<path>/<identifier>.tree.json`, if present.
// Returns None when the fixture file doesn't exist; panics on corrupt JSON.
fn read_tree_by_name(path: &Path, identifier: &str) -> Option<TreeWithMetadata<'static>> {
    let mut file_path = path.join(identifier);
    file_path.set_extension("tree.json");
    let contents = fs::read(&file_path).ok()?;
    let tree: TreeWithMetadata = serde_json::from_slice(&contents)
        .expect("Could not deserialize tree");
    Some(tree)
}
// Writes `tree` as the pretty-printed fixture `<path>/<identifier>.tree.json`,
// overwriting any existing fixture.
fn write_tree_by_name(path: &Path, identifier: &str, tree: &TreeWithMetadata) {
    let mut file_path = path.join(identifier);
    file_path.set_extension("tree.json");
    let mut file = File::create(file_path)
        .expect("Could not open file to write tree");
    serde_json::to_writer_pretty(&mut file, tree)
        .expect("Could not serialize tree to file");
}
/// Describes the first point of divergence found between two trees.
#[derive(Debug)]
pub struct TreeMismatch {
    // Period-delimited instance names from the root down to the mismatch,
    // in the style of Roblox's GetFullName.
    pub path: Cow<'static, str>,
    // Human-readable description of what differed.
    pub detail: Cow<'static, str>,
}
impl TreeMismatch {
    /// Builds a mismatch from any string-like path and detail, taking
    /// ownership of both so the result is `'static`.
    pub fn new<'a, A: Into<Cow<'a, str>>, B: Into<Cow<'a, str>>>(path: A, detail: B) -> TreeMismatch {
        let owned_path = path.into().into_owned();
        let owned_detail = detail.into().into_owned();
        TreeMismatch {
            path: Cow::Owned(owned_path),
            detail: Cow::Owned(owned_detail),
        }
    }

    /// Prepends a parent instance name to the mismatch path, building up a
    /// GetFullName-style path as errors bubble out of recursion.
    fn add_parent(self, name: &str) -> TreeMismatch {
        let prefixed = format!("{}.{}", name, self.path);
        TreeMismatch {
            path: Cow::Owned(prefixed),
            detail: self.detail,
        }
    }
}
impl fmt::Display for TreeMismatch {
    // Renders as two lines: the offending path, then the detail message.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        writeln!(formatter, "Tree mismatch at path {}", self.path)?;
        writeln!(formatter, "{}", self.detail)
    }
}
// Compares two bare trees, ignoring instance metadata entirely (both sides
// are wrapped with empty metadata maps before delegating).
pub fn trees_equal(
    left_tree: &RbxTree,
    right_tree: &RbxTree,
) -> Result<(), TreeMismatch> {
    let left = TreeWithMetadata {
        tree: Cow::Borrowed(left_tree),
        metadata: Cow::Owned(HashMap::new()),
    };
    let right = TreeWithMetadata {
        tree: Cow::Borrowed(right_tree),
        metadata: Cow::Owned(HashMap::new()),
    };
    trees_and_metadata_equal(&left, &right)
}
// Compares two trees (and their metadata) starting from their roots;
// instances_equal recurses through the whole hierarchy.
fn trees_and_metadata_equal(
    left_tree: &TreeWithMetadata,
    right_tree: &TreeWithMetadata,
) -> Result<(), TreeMismatch> {
    let left_id = left_tree.tree.get_root_id();
    let right_id = right_tree.tree.get_root_id();
    instances_equal(left_tree, left_id, right_tree, right_id)
}
/// Compares a pair of instances: identity, properties, children (recursing),
/// then metadata. The first mismatch found short-circuits the chain.
fn instances_equal(
    left_tree: &TreeWithMetadata,
    left_id: RbxId,
    right_tree: &TreeWithMetadata,
    right_id: RbxId,
) -> Result<(), TreeMismatch> {
    basic_equal(left_tree, left_id, right_tree, right_id)
        .and_then(|_| properties_equal(left_tree, left_id, right_tree, right_id))
        .and_then(|_| children_equal(left_tree, left_id, right_tree, right_id))
        .and_then(|_| metadata_equal(left_tree, left_id, right_tree, right_id))
}
// Checks the identity-level fields used to match instances across trees:
// name and class name. IDs are intentionally ignored.
fn basic_equal(
    left_tree: &TreeWithMetadata,
    left_id: RbxId,
    right_tree: &TreeWithMetadata,
    right_id: RbxId,
) -> Result<(), TreeMismatch> {
    let left_instance = left_tree.tree.get_instance(left_id)
        .expect("ID did not exist in left tree");
    let right_instance = right_tree.tree.get_instance(right_id)
        .expect("ID did not exist in right tree");
    if left_instance.name != right_instance.name {
        let message = format!("Name did not match ('{}' vs '{}')", left_instance.name, right_instance.name);
        return Err(TreeMismatch::new(&left_instance.name, message));
    }
    if left_instance.class_name != right_instance.class_name {
        let message = format!("Class name did not match ('{}' vs '{}')", left_instance.class_name, right_instance.class_name);
        return Err(TreeMismatch::new(&left_instance.name, message));
    }
    Ok(())
}
// Compares property maps symmetrically: the first pass catches values that
// differ or are missing on the right; the second catches keys present only
// on the right. `visited` avoids reporting the same key twice.
fn properties_equal(
    left_tree: &TreeWithMetadata,
    left_id: RbxId,
    right_tree: &TreeWithMetadata,
    right_id: RbxId,
) -> Result<(), TreeMismatch> {
    let left_instance = left_tree.tree.get_instance(left_id)
        .expect("ID did not exist in left tree");
    let right_instance = right_tree.tree.get_instance(right_id)
        .expect("ID did not exist in right tree");
    let mut visited = HashSet::new();
    for (key, left_value) in &left_instance.properties {
        visited.insert(key);
        let right_value = right_instance.properties.get(key);
        if Some(left_value) != right_value {
            let message = format!(
                "Property {}:\n\tLeft: {:?}\n\tRight: {:?}",
                key,
                Some(left_value),
                right_value,
            );
            return Err(TreeMismatch::new(&left_instance.name, message));
        }
    }
    for (key, right_value) in &right_instance.properties {
        // Already compared during the left-to-right pass.
        if visited.contains(key) {
            continue;
        }
        let left_value = left_instance.properties.get(key);
        if left_value != Some(right_value) {
            let message = format!(
                "Property {}:\n\tLeft: {:?}\n\tRight: {:?}",
                key,
                left_value,
                Some(right_value),
            );
            return Err(TreeMismatch::new(&left_instance.name, message));
        }
    }
    Ok(())
}
// Compares children positionally: counts must match, then each pair is
// compared recursively. Mismatches deeper down get this instance's name
// prepended to their path via add_parent.
fn children_equal(
    left_tree: &TreeWithMetadata,
    left_id: RbxId,
    right_tree: &TreeWithMetadata,
    right_id: RbxId,
) -> Result<(), TreeMismatch> {
    let left_instance = left_tree.tree.get_instance(left_id)
        .expect("ID did not exist in left tree");
    let right_instance = right_tree.tree.get_instance(right_id)
        .expect("ID did not exist in right tree");
    let left_children = left_instance.get_children_ids();
    let right_children = right_instance.get_children_ids();
    if left_children.len() != right_children.len() {
        return Err(TreeMismatch::new(&left_instance.name, "Instances had different numbers of children"));
    }
    for (left_child_id, right_child_id) in left_children.iter().zip(right_children) {
        instances_equal(left_tree, *left_child_id, right_tree, *right_child_id)
            .map_err(|e| e.add_parent(&left_instance.name))?;
    }
    Ok(())
}
// Compares the per-instance metadata entries for the pair, treating a
// missing entry (None) on either side as a comparable value.
fn metadata_equal(
    left_tree: &TreeWithMetadata,
    left_id: RbxId,
    right_tree: &TreeWithMetadata,
    right_id: RbxId,
) -> Result<(), TreeMismatch> {
    let left_meta = left_tree.metadata.get(&left_id);
    let right_meta = right_tree.metadata.get(&right_id);
    if left_meta != right_meta {
        // The instance is only looked up on mismatch, to name the error path.
        let left_instance = left_tree.tree.get_instance(left_id)
            .expect("Left instance didn't exist in tree");
        let message = format!(
            "Metadata mismatch:\n\tLeft: {:?}\n\tRight: {:?}",
            left_meta,
            right_meta,
        );
        return Err(TreeMismatch::new(&left_instance.name, message));
    }
    Ok(())
}

View File

@@ -1,77 +0,0 @@
mod test_util;
use std::{
fs,
path::{Path, PathBuf},
sync::Arc,
thread,
time::Duration,
};
use tempfile::{tempdir, TempDir};
use librojo::{
live_session::LiveSession,
project::Project,
};
use crate::test_util::{
copy_recursive,
tree::tree_step,
};
#[test]
fn multi_partition_game() {
    // End-to-end: starts a live session on a copy of the project, mutates
    // the copy on disk, and snapshots the session tree after each mutation.
    let _ = env_logger::try_init();
    let source_path = project_path("multi_partition_game");
    let (dir, live_session) = start_session(&source_path);
    tree_step("initial", &live_session, &source_path);
    let added_path = dir.path().join("a/added");
    fs::create_dir_all(&added_path)
        .expect("Couldn't create directory");
    // Fixed sleep to let the filesystem watcher observe the change.
    // NOTE(review): time-based waits like this can be flaky on slow CI.
    thread::sleep(Duration::from_millis(250));
    tree_step("with_dir", &live_session, &source_path);
    // Move the directory between the two partitions.
    let moved_path = dir.path().join("b/added");
    fs::rename(&added_path, &moved_path)
        .expect("Couldn't rename directory");
    thread::sleep(Duration::from_millis(250));
    tree_step("with_moved_dir", &live_session, &source_path);
}
#[test]
fn spawn_location() {
    // End-to-end smoke test: the session's initial tree for this project
    // must match the stored fixture; no mutations are performed.
    let _ = env_logger::try_init();
    let source_path = project_path("spawn_location");
    let (_dir, live_session) = start_session(&source_path);
    tree_step("initial", &live_session, &source_path);
}
/// Resolves the named test project's directory relative to this crate's
/// manifest.
fn project_path(name: &str) -> PathBuf {
    Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("../test-projects")
        .join(name)
}
/// Starts a new LiveSession for the project located at the given file path.
///
/// The project is copied into a fresh temp directory so tests can mutate it
/// freely. The TempDir is returned alongside the session because dropping it
/// deletes the directory.
fn start_session(source_path: &Path) -> (TempDir, LiveSession) {
    let dir = tempdir()
        .expect("Couldn't create temporary directory");
    copy_recursive(&source_path, dir.path())
        .expect("Couldn't copy project to temporary directory");
    let project = Arc::new(Project::load_fuzzy(dir.path())
        .expect("Couldn't load project from temp directory"));
    let live_session = LiveSession::new(Arc::clone(&project))
        .expect("Couldn't start live session");
    (dir, live_session)
}

View File

@@ -1,20 +0,0 @@
{
"name": "empty",
"class_name": "DataModel",
"properties": {},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"empty",
{
"class_name": "DataModel",
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}

View File

@@ -0,0 +1,6 @@
{
"name": "empty_folder",
"tree": {
"$path": "src"
}
}

View File

View File

@@ -1,76 +0,0 @@
{
"name": "json_model",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "children",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "The Child",
"class_name": "StringValue",
"properties": {},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": null,
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/children.model.json",
"project_definition": null
}
},
{
"name": "explicit",
"class_name": "StringValue",
"properties": {
"Value": {
"Type": "String",
"Value": "Hello, world!"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/explicit.model.json",
"project_definition": null
}
},
{
"name": "implicit",
"class_name": "StringValue",
"properties": {
"Value": {
"Type": "String",
"Value": "What's happenin', Earth?"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/implicit.model.json",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src",
"project_definition": [
"json_model",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "src"
}
]
}
}

View File

@@ -1,129 +0,0 @@
{
"name": "test-model",
"class_name": "Tool",
"properties": {
"Enabled": {
"Type": "Bool",
"Value": true
}
},
"children": [
{
"name": "A",
"class_name": "Folder",
"properties": {},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": "src/A",
"project_definition": null
}
},
{
"name": "DisableMe",
"class_name": "Script",
"properties": {
"Disabled": {
"Type": "Bool",
"Value": true
},
"Source": {
"Type": "String",
"Value": ""
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": "src/DisableMe.server.lua",
"project_definition": null
}
},
{
"name": "LocalizationTable",
"class_name": "LocalizationTable",
"properties": {
"Contents": {
"Type": "String",
"Value": "[{\"key\":\"Doge\",\"example\":\"A funny dog\",\"source\":\"Perro!\",\"values\":{\"en\":\"Doge!\"}}]"
},
"SourceLocaleId": {
"Type": "String",
"Value": "es"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/LocalizationTable.csv",
"project_definition": null
}
},
{
"name": "RobloxInstance",
"class_name": "Folder",
"properties": {
"Tags": {
"Type": "BinaryString",
"Value": ""
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": "src/RobloxInstance.rbxmx",
"project_definition": null
}
},
{
"name": "Script",
"class_name": "Script",
"properties": {
"Source": {
"Type": "String",
"Value": "print(\"Hello, world\")"
},
"Disabled": {
"Type": "Bool",
"Value": true
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/Script",
"project_definition": null
}
},
{
"name": "StringValue",
"class_name": "StringValue",
"properties": {
"Value": {
"Type": "String",
"Value": "I'm supposed to put funny text here, aren't I? Oh well."
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": "src/StringValue.txt",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src",
"project_definition": [
"test-model",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "src"
}
]
}
}

View File

@@ -1,212 +0,0 @@
{
"name": "multi_partition_game",
"class_name": "DataModel",
"properties": {},
"children": [
{
"name": "HttpService",
"class_name": "HttpService",
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"HttpService",
{
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
]
}
},
{
"name": "ReplicatedStorage",
"class_name": "ReplicatedStorage",
"properties": {},
"children": [
{
"name": "Ack",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "foo",
"class_name": "StringValue",
"properties": {
"Value": {
"Type": "String",
"Value": "Hello world, from a/foo.txt"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "a/foo.txt",
"project_definition": null
}
},
{
"name": "main",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- hello, from a/main.lua"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "a/main.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "a",
"project_definition": [
"Ack",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
]
}
},
{
"name": "Bar",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "something",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- b/something.lua"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "b/something.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "b",
"project_definition": [
"Bar",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
]
}
}
],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
},
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}
],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"multi_partition_game",
{
"class_name": "DataModel",
"children": {
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
},
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
},
"HttpService": {
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}

View File

@@ -1,242 +0,0 @@
{
"instances": {
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"Name": "main",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- hello, from a/main.lua"
}
},
"Id": "00f207b1-fc18-4088-a45e-caf8cd98f5dd",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"Name": "Ack",
"ClassName": "Folder",
"Properties": {},
"Id": "14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"Children": [
"c55fd55c-258e-4a93-a63a-ea243038c9b9",
"00f207b1-fc18-4088-a45e-caf8cd98f5dd"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"Name": "Bar",
"ClassName": "Folder",
"Properties": {},
"Id": "c910510c-37a8-4fd8-ae41-01169ccb739c",
"Children": [
"71a95983-c856-4cf2-aee6-bd8a523e80e4"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"Name": "foo",
"ClassName": "StringValue",
"Properties": {
"Value": {
"Type": "String",
"Value": "Hello world, from a/foo.txt"
}
},
"Id": "c55fd55c-258e-4a93-a63a-ea243038c9b9",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"Name": "something",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- b/something.lua"
}
},
"Id": "71a95983-c856-4cf2-aee6-bd8a523e80e4",
"Children": [],
"Parent": "c910510c-37a8-4fd8-ae41-01169ccb739c"
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"Name": "multi_partition_game",
"ClassName": "DataModel",
"Properties": {},
"Id": "3b5af13f-c997-4009-915c-0810b0e83032",
"Children": [
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
],
"Parent": null
},
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"Name": "HttpService",
"ClassName": "HttpService",
"Properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"Id": "bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"Children": [],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"Name": "ReplicatedStorage",
"ClassName": "ReplicatedStorage",
"Properties": {},
"Id": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b",
"Children": [
"14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"c910510c-37a8-4fd8-ae41-01169ccb739c"
],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
}
},
"root_id": "3b5af13f-c997-4009-915c-0810b0e83032",
"metadata": {
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"ignore_unknown_instances": false,
"source_path": "a/main.lua",
"project_definition": null
},
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"HttpService",
{
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
]
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"ignore_unknown_instances": false,
"source_path": "a",
"project_definition": [
"Ack",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
]
},
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"ignore_unknown_instances": false,
"source_path": "a/foo.txt",
"project_definition": null
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"ignore_unknown_instances": false,
"source_path": "b/something.lua",
"project_definition": null
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"ignore_unknown_instances": false,
"source_path": "b",
"project_definition": [
"Bar",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
]
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"multi_partition_game",
{
"class_name": "DataModel",
"children": {
"HttpService": {
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
},
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}
}

View File

@@ -1,256 +0,0 @@
{
"instances": {
"b48b369f-5706-4029-9fa6-90651a4910ea": {
"Name": "added",
"ClassName": "Folder",
"Properties": {},
"Id": "b48b369f-5706-4029-9fa6-90651a4910ea",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"Name": "main",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- hello, from a/main.lua"
}
},
"Id": "00f207b1-fc18-4088-a45e-caf8cd98f5dd",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"Name": "Ack",
"ClassName": "Folder",
"Properties": {},
"Id": "14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"Children": [
"b48b369f-5706-4029-9fa6-90651a4910ea",
"c55fd55c-258e-4a93-a63a-ea243038c9b9",
"00f207b1-fc18-4088-a45e-caf8cd98f5dd"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"Name": "Bar",
"ClassName": "Folder",
"Properties": {},
"Id": "c910510c-37a8-4fd8-ae41-01169ccb739c",
"Children": [
"71a95983-c856-4cf2-aee6-bd8a523e80e4"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"Name": "foo",
"ClassName": "StringValue",
"Properties": {
"Value": {
"Type": "String",
"Value": "Hello world, from a/foo.txt"
}
},
"Id": "c55fd55c-258e-4a93-a63a-ea243038c9b9",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"Name": "something",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- b/something.lua"
}
},
"Id": "71a95983-c856-4cf2-aee6-bd8a523e80e4",
"Children": [],
"Parent": "c910510c-37a8-4fd8-ae41-01169ccb739c"
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"Name": "multi_partition_game",
"ClassName": "DataModel",
"Properties": {},
"Id": "3b5af13f-c997-4009-915c-0810b0e83032",
"Children": [
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
],
"Parent": null
},
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"Name": "HttpService",
"ClassName": "HttpService",
"Properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"Id": "bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"Children": [],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"Name": "ReplicatedStorage",
"ClassName": "ReplicatedStorage",
"Properties": {},
"Id": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b",
"Children": [
"14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"c910510c-37a8-4fd8-ae41-01169ccb739c"
],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
}
},
"root_id": "3b5af13f-c997-4009-915c-0810b0e83032",
"metadata": {
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"ignore_unknown_instances": false,
"source_path": "a/foo.txt",
"project_definition": null
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"ignore_unknown_instances": false,
"source_path": "b/something.lua",
"project_definition": null
},
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"ignore_unknown_instances": false,
"source_path": "a/main.lua",
"project_definition": null
},
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"HttpService",
{
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
]
},
"b48b369f-5706-4029-9fa6-90651a4910ea": {
"ignore_unknown_instances": false,
"source_path": "a/added",
"project_definition": null
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"multi_partition_game",
{
"class_name": "DataModel",
"children": {
"HttpService": {
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
},
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"ignore_unknown_instances": false,
"source_path": "b",
"project_definition": [
"Bar",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
]
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"ignore_unknown_instances": false,
"source_path": "a",
"project_definition": [
"Ack",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
]
}
}
}

View File

@@ -1,256 +0,0 @@
{
"instances": {
"866071d6-465a-4b88-8c63-07489d950916": {
"Name": "added",
"ClassName": "Folder",
"Properties": {},
"Id": "866071d6-465a-4b88-8c63-07489d950916",
"Children": [],
"Parent": "c910510c-37a8-4fd8-ae41-01169ccb739c"
},
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"Name": "main",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- hello, from a/main.lua"
}
},
"Id": "00f207b1-fc18-4088-a45e-caf8cd98f5dd",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"Name": "Ack",
"ClassName": "Folder",
"Properties": {},
"Id": "14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"Children": [
"c55fd55c-258e-4a93-a63a-ea243038c9b9",
"00f207b1-fc18-4088-a45e-caf8cd98f5dd"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"Name": "Bar",
"ClassName": "Folder",
"Properties": {},
"Id": "c910510c-37a8-4fd8-ae41-01169ccb739c",
"Children": [
"866071d6-465a-4b88-8c63-07489d950916",
"71a95983-c856-4cf2-aee6-bd8a523e80e4"
],
"Parent": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
},
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"Name": "foo",
"ClassName": "StringValue",
"Properties": {
"Value": {
"Type": "String",
"Value": "Hello world, from a/foo.txt"
}
},
"Id": "c55fd55c-258e-4a93-a63a-ea243038c9b9",
"Children": [],
"Parent": "14fed1a3-ba97-46a6-ae93-ac26bd9471df"
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"Name": "something",
"ClassName": "ModuleScript",
"Properties": {
"Source": {
"Type": "String",
"Value": "-- b/something.lua"
}
},
"Id": "71a95983-c856-4cf2-aee6-bd8a523e80e4",
"Children": [],
"Parent": "c910510c-37a8-4fd8-ae41-01169ccb739c"
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"Name": "multi_partition_game",
"ClassName": "DataModel",
"Properties": {},
"Id": "3b5af13f-c997-4009-915c-0810b0e83032",
"Children": [
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b"
],
"Parent": null
},
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"Name": "HttpService",
"ClassName": "HttpService",
"Properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"Id": "bf8e2d4f-33a0-42a0-8168-1b62d6ac050c",
"Children": [],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"Name": "ReplicatedStorage",
"ClassName": "ReplicatedStorage",
"Properties": {},
"Id": "99eefe5f-ef74-49e6-8a8b-c833e00ca56b",
"Children": [
"14fed1a3-ba97-46a6-ae93-ac26bd9471df",
"c910510c-37a8-4fd8-ae41-01169ccb739c"
],
"Parent": "3b5af13f-c997-4009-915c-0810b0e83032"
}
},
"root_id": "3b5af13f-c997-4009-915c-0810b0e83032",
"metadata": {
"bf8e2d4f-33a0-42a0-8168-1b62d6ac050c": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"HttpService",
{
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
]
},
"c910510c-37a8-4fd8-ae41-01169ccb739c": {
"ignore_unknown_instances": false,
"source_path": "b",
"project_definition": [
"Bar",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
]
},
"866071d6-465a-4b88-8c63-07489d950916": {
"ignore_unknown_instances": false,
"source_path": "b/added",
"project_definition": null
},
"14fed1a3-ba97-46a6-ae93-ac26bd9471df": {
"ignore_unknown_instances": false,
"source_path": "a",
"project_definition": [
"Ack",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
}
]
},
"00f207b1-fc18-4088-a45e-caf8cd98f5dd": {
"ignore_unknown_instances": false,
"source_path": "a/main.lua",
"project_definition": null
},
"99eefe5f-ef74-49e6-8a8b-c833e00ca56b": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
},
"71a95983-c856-4cf2-aee6-bd8a523e80e4": {
"ignore_unknown_instances": false,
"source_path": "b/something.lua",
"project_definition": null
},
"c55fd55c-258e-4a93-a63a-ea243038c9b9": {
"ignore_unknown_instances": false,
"source_path": "a/foo.txt",
"project_definition": null
},
"3b5af13f-c997-4009-915c-0810b0e83032": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"multi_partition_game",
{
"class_name": "DataModel",
"children": {
"HttpService": {
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
},
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {
"Ack": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "a"
},
"Bar": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "b"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}
}

View File

@@ -1,82 +0,0 @@
{
"name": "nested-partitions",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "inner",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "hello",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- inner/hello.lua"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "inner/hello.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "inner",
"project_definition": [
"inner",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "inner"
}
]
}
},
{
"name": "world",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- outer/world.lua"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "outer/world.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "outer",
"project_definition": [
"nested-partitions",
{
"class_name": null,
"children": {
"inner": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "inner"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": "outer"
}
]
}
}

View File

@@ -1,161 +0,0 @@
{
"name": "single-sync-point",
"class_name": "DataModel",
"properties": {},
"children": [
{
"name": "HttpService",
"class_name": "HttpService",
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"HttpService",
{
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
}
]
}
},
{
"name": "ReplicatedStorage",
"class_name": "ReplicatedStorage",
"properties": {},
"children": [
{
"name": "Foo",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "foo",
"class_name": "StringValue",
"properties": {
"Value": {
"Type": "String",
"Value": "Hello world, from foo.txt"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "lib/foo.txt",
"project_definition": null
}
},
{
"name": "main",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- hello, from main"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "lib/main.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "lib",
"project_definition": [
"Foo",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "lib"
}
]
}
}
],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {
"Foo": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "lib"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}
],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"single-sync-point",
{
"class_name": "DataModel",
"children": {
"HttpService": {
"class_name": "HttpService",
"children": {},
"properties": {
"HttpEnabled": {
"Type": "Bool",
"Value": true
}
},
"ignore_unknown_instances": null,
"path": null
},
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {
"Foo": {
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "lib"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}

View File

@@ -1,53 +0,0 @@
{
"name": "test-model",
"class_name": "Folder",
"properties": {},
"children": [
{
"name": "main",
"class_name": "Script",
"properties": {
"Source": {
"Type": "String",
"Value": "local other = require(script.Parent.other)\n\nprint(other)"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/main.server.lua",
"project_definition": null
}
},
{
"name": "other",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "return \"Hello, world!\""
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src/other.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "src",
"project_definition": [
"test-model",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "src"
}
]
}
}

View File

@@ -1,266 +0,0 @@
{
"instances": {
"21406306-61c7-45f6-9e3e-50cd6b609214": {
"Name": "spawn_location",
"ClassName": "Folder",
"Properties": {},
"Id": "21406306-61c7-45f6-9e3e-50cd6b609214",
"Children": [
"da5dd362-4eb6-488a-984b-c96327927a70"
],
"Parent": null
},
"da5dd362-4eb6-488a-984b-c96327927a70": {
"Name": "spawn",
"ClassName": "SpawnLocation",
"Properties": {
"Size": {
"Type": "Vector3",
"Value": [
12.0,
1.0,
12.0
]
},
"Massless": {
"Type": "Bool",
"Value": false
},
"LeftSurface": {
"Type": "Enum",
"Value": 0
},
"Color": {
"Type": "Color3",
"Value": [
0.6392157,
0.63529414,
0.64705884
]
},
"RightSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"Neutral": {
"Type": "Bool",
"Value": true
},
"Locked": {
"Type": "Bool",
"Value": false
},
"Anchored": {
"Type": "Bool",
"Value": true
},
"CanCollide": {
"Type": "Bool",
"Value": true
},
"FrontSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"LeftSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"RightSurface": {
"Type": "Enum",
"Value": 0
},
"FrontSurface": {
"Type": "Enum",
"Value": 0
},
"TopSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"CastShadow": {
"Type": "Bool",
"Value": true
},
"BottomSurface": {
"Type": "Enum",
"Value": 0
},
"Duration": {
"Type": "Int32",
"Value": 10
},
"Velocity": {
"Type": "Vector3",
"Value": [
0.0,
0.0,
0.0
]
},
"BottomSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"TopSurface": {
"Type": "Enum",
"Value": 0
},
"CFrame": {
"Type": "CFrame",
"Value": [
-6.0,
0.500001,
-11.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0
]
},
"TeamColor": {
"Type": "BrickColor",
"Value": 194
},
"RotVelocity": {
"Type": "Vector3",
"Value": [
0.0,
0.0,
0.0
]
},
"Reflectance": {
"Type": "Float32",
"Value": 0.0
},
"AllowTeamChangeOnTouch": {
"Type": "Bool",
"Value": false
},
"RightParamA": {
"Type": "Float32",
"Value": -0.5
},
"LeftParamA": {
"Type": "Float32",
"Value": -0.5
},
"Tags": {
"Type": "BinaryString",
"Value": ""
},
"Transparency": {
"Type": "Float32",
"Value": 0.0
},
"Shape": {
"Type": "Enum",
"Value": 1
},
"FrontParamA": {
"Type": "Float32",
"Value": -0.5
},
"RootPriority": {
"Type": "Int32",
"Value": 0
},
"FrontParamB": {
"Type": "Float32",
"Value": 0.5
},
"FormFactor": {
"Type": "Enum",
"Value": 1
},
"BottomParamB": {
"Type": "Float32",
"Value": 0.5
},
"BackSurface": {
"Type": "Enum",
"Value": 0
},
"BottomParamA": {
"Type": "Float32",
"Value": -0.5
},
"Enabled": {
"Type": "Bool",
"Value": true
},
"BackParamB": {
"Type": "Float32",
"Value": 0.5
},
"BackParamA": {
"Type": "Float32",
"Value": -0.5
},
"LeftParamB": {
"Type": "Float32",
"Value": 0.5
},
"TopParamA": {
"Type": "Float32",
"Value": -0.5
},
"RightParamB": {
"Type": "Float32",
"Value": 0.5
},
"CollisionGroupId": {
"Type": "Int32",
"Value": 0
},
"BackSurfaceInput": {
"Type": "Enum",
"Value": 0
},
"CustomPhysicalProperties": {
"Type": "PhysicalProperties",
"Value": null
},
"TopParamB": {
"Type": "Float32",
"Value": 0.5
},
"Material": {
"Type": "Enum",
"Value": 256
}
},
"Id": "da5dd362-4eb6-488a-984b-c96327927a70",
"Children": [],
"Parent": "21406306-61c7-45f6-9e3e-50cd6b609214"
}
},
"root_id": "21406306-61c7-45f6-9e3e-50cd6b609214",
"metadata": {
"da5dd362-4eb6-488a-984b-c96327927a70": {
"ignore_unknown_instances": false,
"source_path": "src/spawn.rbxmx",
"project_definition": null
},
"21406306-61c7-45f6-9e3e-50cd6b609214": {
"ignore_unknown_instances": false,
"source_path": "src",
"project_definition": [
"spawn_location",
{
"class_name": null,
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "src"
}
]
}
}
}

View File

@@ -1,66 +0,0 @@
{
"name": "transmute-partition",
"class_name": "DataModel",
"properties": {},
"children": [
{
"name": "ReplicatedStorage",
"class_name": "ReplicatedStorage",
"properties": {},
"children": [
{
"name": "hello",
"class_name": "ModuleScript",
"properties": {
"Source": {
"Type": "String",
"Value": "-- ReplicatedStorage/hello.lua"
}
},
"children": [],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "ReplicatedStorage/hello.lua",
"project_definition": null
}
}
],
"metadata": {
"ignore_unknown_instances": false,
"source_path": "ReplicatedStorage",
"project_definition": [
"ReplicatedStorage",
{
"class_name": "ReplicatedStorage",
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "ReplicatedStorage"
}
]
}
}
],
"metadata": {
"ignore_unknown_instances": true,
"source_path": null,
"project_definition": [
"transmute-partition",
{
"class_name": "DataModel",
"children": {
"ReplicatedStorage": {
"class_name": "ReplicatedStorage",
"children": {},
"properties": {},
"ignore_unknown_instances": null,
"path": "ReplicatedStorage"
}
},
"properties": {},
"ignore_unknown_instances": null,
"path": null
}
]
}
}

View File

@@ -0,0 +1,6 @@
{
"name": "empty_folder",
"tree": {
"$path": "src"
}
}

View File

@@ -0,0 +1 @@
Hello, from foo.txt!