Compare commits

...

11 Commits

Author SHA1 Message Date
Micah
825726c883 Release 7.6.1 (#1151) 2025-11-06 18:49:05 -08:00
boatbomber
54e63d88d4 Slightly improve initial sync hangs (#1140) 2025-11-06 00:06:42 -08:00
boatbomber
4018c97cb6 Make CHANGELOG.md use consistent style (#1146) 2025-10-28 19:26:48 -07:00
boatbomber
d0b029f995 Add JSONC Support for Project, Meta, and Model JSON files (#1144)
Replaces `serde_json` parsing with `jsonc-parser` throughout the
codebase, enabling support for **comments** and **trailing commas** in
all JSON files including `.project.json`, `.model.json`, and
`.meta.json` files.
MSRV bumps from `1.83.0` to `1.88.0` in order to
use the jsonc_parser dependency.
2025-10-28 17:29:57 -07:00
Sebastian Stachowicz
aabe6d11b2 Update default gitignores to include sourcemap (#1145) 2025-10-28 17:28:55 -07:00
boatbomber
181cc37744 Improve sync fallback robustness (#1135) 2025-10-20 20:13:47 -07:00
boatbomber
cd78f5c02c Fix postcommit callbacks being skipped (#1132) 2025-10-14 12:13:59 -07:00
Micah
441c469966 Release Rojo v7.6.0 (#1125) 2025-10-10 19:17:55 -07:00
Micah
f3c423d77d Fix the various lints (#1124) 2025-10-10 13:00:56 -07:00
Micah
beb497878b Add flag for skipping git initialization to init command (#1122) 2025-10-07 17:12:22 -07:00
Micah
6ea95d487c Refactor init command (#1117) 2025-09-30 14:38:38 -07:00
48 changed files with 9259 additions and 1459 deletions

View File

@@ -60,7 +60,7 @@ jobs:
submodules: true submodules: true
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@1.79.0 uses: dtolnay/rust-toolchain@1.88.0
- name: Restore Rust Cache - name: Restore Rust Cache
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4

File diff suppressed because it is too large Load Diff

179
Cargo.lock generated
View File

@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo. # This file is automatically @generated by Cargo.
# It is not intended for manual editing. # It is not intended for manual editing.
version = 3 version = 4
[[package]] [[package]]
name = "addr2line" name = "addr2line"
@@ -243,7 +243,7 @@ checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro-error", "proc-macro-error",
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 1.0.109", "syn 1.0.109",
] ]
@@ -430,7 +430,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3" checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3"
dependencies = [ dependencies = [
"cfg-if 0.1.10", "cfg-if 0.1.10",
"dirs-sys", "dirs-sys 0.3.7",
]
[[package]]
name = "dirs"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
dependencies = [
"dirs-sys 0.4.1",
] ]
[[package]] [[package]]
@@ -444,6 +453,18 @@ dependencies = [
"winapi 0.3.9", "winapi 0.3.9",
] ]
[[package]]
name = "dirs-sys"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.48.0",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.10.0" version = "1.10.0"
@@ -644,9 +665,9 @@ version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
@@ -1012,6 +1033,15 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "jsonc-parser"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ec4ac49f13c7b00f435f8a5bb55d725705e2cf620df35a5859321595102eb7e"
dependencies = [
"serde_json",
]
[[package]] [[package]]
name = "kernel32-sys" name = "kernel32-sys"
version = "0.2.2" version = "0.2.2"
@@ -1093,23 +1123,12 @@ dependencies = [
] ]
[[package]] [[package]]
name = "lz4" name = "lz4_flex"
version = "1.24.0" version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a"
dependencies = [ dependencies = [
"libc", "twox-hash",
"lz4-sys",
]
[[package]]
name = "lz4-sys"
version = "1.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900"
dependencies = [
"cc",
"libc",
] ]
[[package]] [[package]]
@@ -1301,6 +1320,12 @@ dependencies = [
"winapi 0.3.9", "winapi 0.3.9",
] ]
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]] [[package]]
name = "os_str_bytes" name = "os_str_bytes"
version = "6.6.1" version = "6.6.1"
@@ -1377,9 +1402,9 @@ checksum = "fdc17e2a6c7d0a492f0158d7a4bd66cc17280308bbaff78d5bef566dca35ab80"
dependencies = [ dependencies = [
"pest", "pest",
"pest_meta", "pest_meta",
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
@@ -1462,7 +1487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [ dependencies = [
"proc-macro-error-attr", "proc-macro-error-attr",
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 1.0.109", "syn 1.0.109",
"version_check", "version_check",
@@ -1474,7 +1499,7 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"version_check", "version_check",
] ]
@@ -1502,9 +1527,9 @@ dependencies = [
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.78" version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@@ -1526,7 +1551,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd" checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd"
dependencies = [ dependencies = [
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
@@ -1544,7 +1569,7 @@ version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
] ]
[[package]] [[package]]
@@ -1599,13 +1624,13 @@ dependencies = [
[[package]] [[package]]
name = "rbx_binary" name = "rbx_binary"
version = "1.0.0" version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9573fee5e073d7b303f475c285197fdc8179468de66ca60ee115a58fbac99296" checksum = "0d419f67c8012bf83569086e1208c541478b3b8e4f523deaa0b80d723fb5ef22"
dependencies = [ dependencies = [
"ahash", "ahash",
"log", "log",
"lz4", "lz4_flex",
"profiling", "profiling",
"rbx_dom_weak", "rbx_dom_weak",
"rbx_reflection", "rbx_reflection",
@@ -1616,9 +1641,9 @@ dependencies = [
[[package]] [[package]]
name = "rbx_dom_weak" name = "rbx_dom_weak"
version = "3.0.0" version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04425cf6e9376e5486f4fb35906c120d1b1b45618a490318cf563fab1fa230a9" checksum = "bc74878a4a801afc8014b14ede4b38015a13de5d29ab0095d5ed284a744253f6"
dependencies = [ dependencies = [
"ahash", "ahash",
"rbx_types", "rbx_types",
@@ -1628,9 +1653,9 @@ dependencies = [
[[package]] [[package]]
name = "rbx_reflection" name = "rbx_reflection"
version = "5.0.0" version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b6d0d62baa613556b058a5f94a53b01cf0ccde0ea327ce03056e335b982e77e" checksum = "565dd3430991f35443fa6d23cc239fade2110c5089deb6bae5de77c400df4fd2"
dependencies = [ dependencies = [
"rbx_types", "rbx_types",
"serde", "serde",
@@ -1639,11 +1664,12 @@ dependencies = [
[[package]] [[package]]
name = "rbx_reflection_database" name = "rbx_reflection_database"
version = "1.0.3+roblox-670" version = "2.0.1+roblox-697"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e22c05ef92528c0fb0cc580592a65ca178d3ea9beb07a1d9ca0a2503c4f3721c" checksum = "d69035a14b103c5a9b8bc6a61d30f4ee6f2608afdee137dae09b26037dba5dc8"
dependencies = [ dependencies = [
"lazy_static", "dirs 5.0.1",
"log",
"rbx_reflection", "rbx_reflection",
"rmp-serde", "rmp-serde",
"serde", "serde",
@@ -1651,9 +1677,9 @@ dependencies = [
[[package]] [[package]]
name = "rbx_types" name = "rbx_types"
version = "2.0.0" version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78e4fdde46493def107e5f923d82e813dec9b3eef52c2f75fbad3a716023eda2" checksum = "03220ffce2bd06ad04f77a003cb807f2e5b2a18e97623066a5ac735a978398af"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"bitflags 1.3.2", "bitflags 1.3.2",
@@ -1666,9 +1692,9 @@ dependencies = [
[[package]] [[package]]
name = "rbx_xml" name = "rbx_xml"
version = "1.0.0" version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb623833c31cc43bbdaeb32f5e91db8ecd63fc46e438d0d268baf9e61539cf1c" checksum = "be6c302cefe9c92ed09bcbb075cd24379271de135b0af331409a64c2ea3646ee"
dependencies = [ dependencies = [
"ahash", "ahash",
"base64 0.13.1", "base64 0.13.1",
@@ -1860,14 +1886,14 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "743bb8c693a387f1ae8d2026d82d8b0c175cc4777b97c1f7b12fdb3be595bb13" checksum = "743bb8c693a387f1ae8d2026d82d8b0c175cc4777b97c1f7b12fdb3be595bb13"
dependencies = [ dependencies = [
"dirs", "dirs 2.0.2",
"thiserror", "thiserror",
"winreg 0.6.2", "winreg 0.6.2",
] ]
[[package]] [[package]]
name = "rojo" name = "rojo"
version = "7.5.1" version = "7.6.1"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"backtrace", "backtrace",
@@ -1886,6 +1912,7 @@ dependencies = [
"hyper", "hyper",
"insta", "insta",
"jod-thread", "jod-thread",
"jsonc-parser",
"log", "log",
"maplit", "maplit",
"memofs", "memofs",
@@ -2037,10 +2064,11 @@ checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.197" version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [ dependencies = [
"serde_core",
"serde_derive", "serde_derive",
] ]
@@ -2055,25 +2083,36 @@ dependencies = [
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_core"
version = "1.0.197" version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.114" version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
dependencies = [ dependencies = [
"itoa", "itoa",
"memchr",
"ryu", "ryu",
"serde", "serde",
"serde_core",
] ]
[[package]] [[package]]
@@ -2178,18 +2217,18 @@ version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.52" version = "2.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"unicode-ident", "unicode-ident",
] ]
@@ -2272,9 +2311,9 @@ version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
@@ -2384,9 +2423,9 @@ version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]
@@ -2454,6 +2493,12 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "twox-hash"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c"
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.17.0" version = "1.17.0"
@@ -2615,9 +2660,9 @@ dependencies = [
"bumpalo", "bumpalo",
"log", "log",
"once_cell", "once_cell",
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@@ -2649,9 +2694,9 @@ version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@@ -2947,9 +2992,9 @@ version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [ dependencies = [
"proc-macro2 1.0.78", "proc-macro2 1.0.103",
"quote 1.0.35", "quote 1.0.35",
"syn 2.0.52", "syn 2.0.108",
] ]
[[package]] [[package]]

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "rojo" name = "rojo"
version = "7.5.1" version = "7.6.1"
rust-version = "1.79.0" rust-version = "1.88"
authors = [ authors = [
"Lucien Greathouse <me@lpghatguy.com>", "Lucien Greathouse <me@lpghatguy.com>",
"Micah Reid <git@dekkonot.com>", "Micah Reid <git@dekkonot.com>",
@@ -55,11 +55,11 @@ memofs = { version = "0.3.0", path = "crates/memofs" }
# rbx_reflection_database = { path = "../rbx-dom/rbx_reflection_database" } # rbx_reflection_database = { path = "../rbx-dom/rbx_reflection_database" }
# rbx_xml = { path = "../rbx-dom/rbx_xml" } # rbx_xml = { path = "../rbx-dom/rbx_xml" }
rbx_binary = "1.0.0" rbx_binary = "2.0.0"
rbx_dom_weak = "3.0.0" rbx_dom_weak = "4.0.0"
rbx_reflection = "5.0.0" rbx_reflection = "6.0.0"
rbx_reflection_database = "1.0.3" rbx_reflection_database = "2.0.1"
rbx_xml = "1.0.0" rbx_xml = "2.0.0"
anyhow = "1.0.80" anyhow = "1.0.80"
backtrace = "0.3.69" backtrace = "0.3.69"
@@ -85,7 +85,8 @@ reqwest = { version = "0.11.24", default-features = false, features = [
ritz = "0.1.0" ritz = "0.1.0"
roblox_install = "1.0.0" roblox_install = "1.0.0"
serde = { version = "1.0.197", features = ["derive", "rc"] } serde = { version = "1.0.197", features = ["derive", "rc"] }
serde_json = "1.0.114" serde_json = "1.0.145"
jsonc-parser = { version = "0.27.0", features = ["serde"] }
toml = "0.5.11" toml = "0.5.11"
termcolor = "1.4.1" termcolor = "1.4.1"
thiserror = "1.0.57" thiserror = "1.0.57"

View File

@@ -40,7 +40,7 @@ Check out our [contribution guide](CONTRIBUTING.md) for detailed instructions fo
Pull requests are welcome! Pull requests are welcome!
Rojo supports Rust 1.70.0 and newer. The minimum supported version of Rust is based on the latest versions of the dependencies that Rojo has. Rojo supports Rust 1.88 and newer. The minimum supported version of Rust is based on the latest versions of the dependencies that Rojo has.
## License ## License
Rojo is available under the terms of the Mozilla Public License, Version 2.0. See [LICENSE.txt](LICENSE.txt) for details. Rojo is available under the terms of the Mozilla Public License, Version 2.0. See [LICENSE.txt](LICENSE.txt) for details.

View File

@@ -1,3 +1,5 @@
# Roblox Studio lock files # Roblox Studio lock files
/*.rbxlx.lock /*.rbxlx.lock
/*.rbxl.lock /*.rbxl.lock
sourcemap.json

View File

@@ -4,3 +4,5 @@
# Roblox Studio lock files # Roblox Studio lock files
/*.rbxlx.lock /*.rbxlx.lock
/*.rbxl.lock /*.rbxl.lock
sourcemap.json

View File

@@ -0,0 +1 @@
print("Hello world, from client!")

View File

@@ -0,0 +1 @@
print("Hello world, from server!")

View File

@@ -0,0 +1,3 @@
return function()
print("Hello, world!")
end

View File

@@ -1,3 +1,5 @@
# Plugin model files # Plugin model files
/{project_name}.rbxmx /{project_name}.rbxmx
/{project_name}.rbxm /{project_name}.rbxm
sourcemap.json

View File

@@ -0,0 +1 @@
print("Hello world, from plugin!")

View File

@@ -47,6 +47,7 @@ fn main() -> Result<(), anyhow::Error> {
let root_dir = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let root_dir = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let plugin_dir = root_dir.join("plugin"); let plugin_dir = root_dir.join("plugin");
let templates_dir = root_dir.join("assets").join("project-templates");
let our_version = Version::parse(env::var_os("CARGO_PKG_VERSION").unwrap().to_str().unwrap())?; let our_version = Version::parse(env::var_os("CARGO_PKG_VERSION").unwrap().to_str().unwrap())?;
let plugin_version = let plugin_version =
@@ -57,7 +58,9 @@ fn main() -> Result<(), anyhow::Error> {
"plugin version does not match Cargo version" "plugin version does not match Cargo version"
); );
let snapshot = VfsSnapshot::dir(hashmap! { let template_snapshot = snapshot_from_fs_path(&templates_dir)?;
let plugin_snapshot = VfsSnapshot::dir(hashmap! {
"default.project.json" => snapshot_from_fs_path(&root_dir.join("plugin.project.json"))?, "default.project.json" => snapshot_from_fs_path(&root_dir.join("plugin.project.json"))?,
"plugin" => VfsSnapshot::dir(hashmap! { "plugin" => VfsSnapshot::dir(hashmap! {
"fmt" => snapshot_from_fs_path(&plugin_dir.join("fmt"))?, "fmt" => snapshot_from_fs_path(&plugin_dir.join("fmt"))?,
@@ -70,10 +73,11 @@ fn main() -> Result<(), anyhow::Error> {
}), }),
}); });
let out_path = Path::new(&out_dir).join("plugin.bincode"); let template_file = File::create(Path::new(&out_dir).join("templates.bincode"))?;
let out_file = File::create(out_path)?; let plugin_file = File::create(Path::new(&out_dir).join("plugin.bincode"))?;
bincode::serialize_into(out_file, &snapshot)?; bincode::serialize_into(plugin_file, &plugin_snapshot)?;
bincode::serialize_into(template_file, &template_snapshot)?;
println!("cargo:rerun-if-changed=build/windows/rojo-manifest.rc"); println!("cargo:rerun-if-changed=build/windows/rojo-manifest.rc");
println!("cargo:rerun-if-changed=build/windows/rojo.manifest"); println!("cargo:rerun-if-changed=build/windows/rojo.manifest");

View File

@@ -228,23 +228,17 @@ impl VfsBackend for InMemoryFs {
} }
fn must_be_file<T>(path: &Path) -> io::Result<T> { fn must_be_file<T>(path: &Path) -> io::Result<T> {
Err(io::Error::new( Err(io::Error::other(format!(
io::ErrorKind::Other, "path {} was a directory, but must be a file",
format!( path.display()
"path {} was a directory, but must be a file", )))
path.display()
),
))
} }
fn must_be_dir<T>(path: &Path) -> io::Result<T> { fn must_be_dir<T>(path: &Path) -> io::Result<T> {
Err(io::Error::new( Err(io::Error::other(format!(
io::ErrorKind::Other, "path {} was a file, but must be a directory",
format!( path.display()
"path {} was a file, but must be a directory", )))
path.display()
),
))
} }
fn not_found<T>(path: &Path) -> io::Result<T> { fn not_found<T>(path: &Path) -> io::Result<T> {

View File

@@ -15,45 +15,27 @@ impl NoopBackend {
impl VfsBackend for NoopBackend { impl VfsBackend for NoopBackend {
fn read(&mut self, _path: &Path) -> io::Result<Vec<u8>> { fn read(&mut self, _path: &Path) -> io::Result<Vec<u8>> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn write(&mut self, _path: &Path, _data: &[u8]) -> io::Result<()> { fn write(&mut self, _path: &Path, _data: &[u8]) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn read_dir(&mut self, _path: &Path) -> io::Result<ReadDir> { fn read_dir(&mut self, _path: &Path) -> io::Result<ReadDir> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn remove_file(&mut self, _path: &Path) -> io::Result<()> { fn remove_file(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn remove_dir_all(&mut self, _path: &Path) -> io::Result<()> { fn remove_dir_all(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn metadata(&mut self, _path: &Path) -> io::Result<Metadata> { fn metadata(&mut self, _path: &Path) -> io::Result<Metadata> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> { fn event_receiver(&self) -> crossbeam_channel::Receiver<VfsEvent> {
@@ -61,17 +43,11 @@ impl VfsBackend for NoopBackend {
} }
fn watch(&mut self, _path: &Path) -> io::Result<()> { fn watch(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn unwatch(&mut self, _path: &Path) -> io::Result<()> { fn unwatch(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
} }

View File

@@ -109,15 +109,13 @@ impl VfsBackend for StdBackend {
self.watches.insert(path.to_path_buf()); self.watches.insert(path.to_path_buf());
self.watcher self.watcher
.watch(path, RecursiveMode::Recursive) .watch(path, RecursiveMode::Recursive)
.map_err(|inner| io::Error::new(io::ErrorKind::Other, inner)) .map_err(io::Error::other)
} }
} }
fn unwatch(&mut self, path: &Path) -> io::Result<()> { fn unwatch(&mut self, path: &Path) -> io::Result<()> {
self.watches.remove(path); self.watches.remove(path);
self.watcher self.watcher.unwatch(path).map_err(io::Error::other)
.unwatch(path)
.map_err(|inner| io::Error::new(io::ErrorKind::Other, inner))
} }
} }

View File

@@ -1 +1 @@
7.5.1 7.6.1

View File

@@ -378,13 +378,26 @@ types = {
if pod == "Default" then if pod == "Default" then
return nil return nil
else else
return PhysicalProperties.new( -- Passing `nil` instead of not passing anything gives
pod.density, -- different results, so we have to branch here.
pod.friction, if pod.acousticAbsorption then
pod.elasticity, return (PhysicalProperties.new :: any)(
pod.frictionWeight, pod.density,
pod.elasticityWeight pod.friction,
) pod.elasticity,
pod.frictionWeight,
pod.elasticityWeight,
pod.acousticAbsorption
)
else
return PhysicalProperties.new(
pod.density,
pod.friction,
pod.elasticity,
pod.frictionWeight,
pod.elasticityWeight
)
end
end end
end, end,
@@ -398,6 +411,7 @@ types = {
elasticity = roblox.Elasticity, elasticity = roblox.Elasticity,
frictionWeight = roblox.FrictionWeight, frictionWeight = roblox.FrictionWeight,
elasticityWeight = roblox.ElasticityWeight, elasticityWeight = roblox.ElasticityWeight,
acousticAbsorption = roblox.AcousticAbsorption,
} }
end end
end, end,

View File

@@ -441,7 +441,8 @@
"friction": 1.0, "friction": 1.0,
"elasticity": 0.0, "elasticity": 0.0,
"frictionWeight": 50.0, "frictionWeight": 50.0,
"elasticityWeight": 25.0 "elasticityWeight": 25.0,
"acousticAbsorption": 0.15625
} }
}, },
"ty": "PhysicalProperties" "ty": "PhysicalProperties"

File diff suppressed because it is too large Load Diff

View File

@@ -4,8 +4,6 @@ local Packages = Rojo.Packages
local Roact = require(Packages.Roact) local Roact = require(Packages.Roact)
local Timer = require(Plugin.Timer)
local PatchTree = require(Plugin.PatchTree)
local Settings = require(Plugin.Settings) local Settings = require(Plugin.Settings)
local Theme = require(Plugin.App.Theme) local Theme = require(Plugin.App.Theme)
local TextButton = require(Plugin.App.Components.TextButton) local TextButton = require(Plugin.App.Components.TextButton)
@@ -24,7 +22,6 @@ function ConfirmingPage:init()
self.containerSize, self.setContainerSize = Roact.createBinding(Vector2.new(0, 0)) self.containerSize, self.setContainerSize = Roact.createBinding(Vector2.new(0, 0))
self:setState({ self:setState({
patchTree = nil,
showingStringDiff = false, showingStringDiff = false,
oldString = "", oldString = "",
newString = "", newString = "",
@@ -32,28 +29,6 @@ function ConfirmingPage:init()
oldTable = {}, oldTable = {},
newTable = {}, newTable = {},
}) })
if self.props.confirmData and self.props.confirmData.patch and self.props.confirmData.instanceMap then
self:buildPatchTree()
end
end
function ConfirmingPage:didUpdate(prevProps)
if prevProps.confirmData ~= self.props.confirmData then
self:buildPatchTree()
end
end
function ConfirmingPage:buildPatchTree()
Timer.start("ConfirmingPage:buildPatchTree")
self:setState({
patchTree = PatchTree.build(
self.props.confirmData.patch,
self.props.confirmData.instanceMap,
{ "Property", "Current", "Incoming" }
),
})
Timer.stop()
end end
function ConfirmingPage:render() function ConfirmingPage:render()
@@ -79,7 +54,7 @@ function ConfirmingPage:render()
transparency = self.props.transparency, transparency = self.props.transparency,
layoutOrder = 3, layoutOrder = 3,
patchTree = self.state.patchTree, patchTree = self.props.patchTree,
showStringDiff = function(oldString: string, newString: string) showStringDiff = function(oldString: string, newString: string)
self:setState({ self:setState({

View File

@@ -4,6 +4,8 @@ local Packages = Rojo.Packages
local Roact = require(Packages.Roact) local Roact = require(Packages.Roact)
local Theme = require(Plugin.App.Theme)
local Spinner = require(Plugin.App.Components.Spinner) local Spinner = require(Plugin.App.Components.Spinner)
local e = Roact.createElement local e = Roact.createElement
@@ -11,11 +13,35 @@ local e = Roact.createElement
local ConnectingPage = Roact.Component:extend("ConnectingPage") local ConnectingPage = Roact.Component:extend("ConnectingPage")
function ConnectingPage:render() function ConnectingPage:render()
return e(Spinner, { return Theme.with(function(theme)
position = UDim2.new(0.5, 0, 0.5, 0), return e("Frame", {
anchorPoint = Vector2.new(0.5, 0.5), Size = UDim2.new(1, 0, 1, 0),
transparency = self.props.transparency, BackgroundTransparency = 1,
}) }, {
Spinner = e(Spinner, {
position = UDim2.new(0.5, 0, 0.5, 0),
anchorPoint = Vector2.new(0.5, 0.5),
transparency = self.props.transparency,
}),
Text = if type(self.props.text) == "string" and #self.props.text > 0
then e("TextLabel", {
Text = self.props.text,
Position = UDim2.new(0.5, 0, 0.5, 30),
Size = UDim2.new(1, -40, 0.5, -40),
AnchorPoint = Vector2.new(0.5, 0),
TextXAlignment = Enum.TextXAlignment.Center,
TextYAlignment = Enum.TextYAlignment.Top,
RichText = true,
FontFace = theme.Font.Thin,
TextSize = theme.TextSize.Medium,
TextColor3 = theme.SubTextColor,
TextTruncate = Enum.TextTruncate.AtEnd,
TextTransparency = self.props.transparency,
BackgroundTransparency = 1,
})
else nil,
})
end)
end end
return ConnectingPage return ConnectingPage

View File

@@ -595,6 +595,12 @@ function App:startSession()
twoWaySync = Settings:get("twoWaySync"), twoWaySync = Settings:get("twoWaySync"),
}) })
serveSession:setUpdateLoadingTextCallback(function(text: string)
self:setState({
connectingText = text,
})
end)
self.cleanupPrecommit = serveSession:hookPrecommit(function(patch, instanceMap) self.cleanupPrecommit = serveSession:hookPrecommit(function(patch, instanceMap)
-- Build new tree for patch -- Build new tree for patch
self:setState({ self:setState({
@@ -602,46 +608,32 @@ function App:startSession()
}) })
end) end)
self.cleanupPostcommit = serveSession:hookPostcommit(function(patch, instanceMap, unappliedPatch) self.cleanupPostcommit = serveSession:hookPostcommit(function(patch, instanceMap, unappliedPatch)
-- Update tree with unapplied metadata local now = DateTime.now().UnixTimestamp
self:setState(function(prevState) self:setState(function(prevState)
local oldPatchData = prevState.patchData
local newPatchData = {
patch = patch,
unapplied = unappliedPatch,
timestamp = now,
}
if PatchSet.isEmpty(patch) then
-- Keep existing patch info, but use new timestamp
newPatchData.patch = oldPatchData.patch
newPatchData.unapplied = oldPatchData.unapplied
elseif now - oldPatchData.timestamp < 2 then
-- Patches that apply in the same second are combined for human clarity
newPatchData.patch = PatchSet.assign(PatchSet.newEmpty(), oldPatchData.patch, patch)
newPatchData.unapplied = PatchSet.assign(PatchSet.newEmpty(), oldPatchData.unapplied, unappliedPatch)
end
return { return {
patchTree = PatchTree.updateMetadata(prevState.patchTree, patch, instanceMap, unappliedPatch), patchTree = PatchTree.updateMetadata(prevState.patchTree, patch, instanceMap, unappliedPatch),
patchData = newPatchData,
} }
end) end)
end) end)
serveSession:hookPostcommit(function(patch, _instanceMap, unapplied)
local now = DateTime.now().UnixTimestamp
local old = self.state.patchData
if PatchSet.isEmpty(patch) then
-- Ignore empty patch, but update timestamp
self:setState({
patchData = {
patch = old.patch,
unapplied = old.unapplied,
timestamp = now,
},
})
return
end
if now - old.timestamp < 2 then
-- Patches that apply in the same second are
-- considered to be part of the same change for human clarity
patch = PatchSet.assign(PatchSet.newEmpty(), old.patch, patch)
unapplied = PatchSet.assign(PatchSet.newEmpty(), old.unapplied, unapplied)
end
self:setState({
patchData = {
patch = patch,
unapplied = unapplied,
timestamp = now,
},
})
end)
serveSession:onStatusChanged(function(status, details) serveSession:onStatusChanged(function(status, details)
if status == ServeSession.Status.Connecting then if status == ServeSession.Status.Connecting then
if self.dismissSyncReminder then if self.dismissSyncReminder then
@@ -773,11 +765,13 @@ function App:startSession()
end end
end end
self:setState({
connectingText = "Computing diff view...",
})
self:setState({ self:setState({
appStatus = AppStatus.Confirming, appStatus = AppStatus.Confirming,
patchTree = PatchTree.build(patch, instanceMap, { "Property", "Current", "Incoming" }),
confirmData = { confirmData = {
instanceMap = instanceMap,
patch = patch,
serverInfo = serverInfo, serverInfo = serverInfo,
}, },
toolbarIcon = Assets.Images.PluginButton, toolbarIcon = Assets.Images.PluginButton,
@@ -888,6 +882,7 @@ function App:render()
ConfirmingPage = createPageElement(AppStatus.Confirming, { ConfirmingPage = createPageElement(AppStatus.Confirming, {
confirmData = self.state.confirmData, confirmData = self.state.confirmData,
patchTree = self.state.patchTree,
createPopup = not self.state.guiEnabled, createPopup = not self.state.guiEnabled,
onAbort = function() onAbort = function()
@@ -901,7 +896,9 @@ function App:render()
end, end,
}), }),
Connecting = createPageElement(AppStatus.Connecting), Connecting = createPageElement(AppStatus.Connecting, {
text = self.state.connectingText,
}),
Connected = createPageElement(AppStatus.Connected, { Connected = createPageElement(AppStatus.Connected, {
projectName = self.state.projectName, projectName = self.state.projectName,

View File

@@ -16,6 +16,14 @@ local Types = require(Plugin.Types)
local decodeValue = require(Plugin.Reconciler.decodeValue) local decodeValue = require(Plugin.Reconciler.decodeValue)
local getProperty = require(Plugin.Reconciler.getProperty) local getProperty = require(Plugin.Reconciler.getProperty)
local function yieldIfNeeded(clock)
if os.clock() - clock > 1 / 20 then
task.wait()
return os.clock()
end
return clock
end
local function alphabeticalNext(t, state) local function alphabeticalNext(t, state)
-- Equivalent of the next function, but returns the keys in the alphabetic -- Equivalent of the next function, but returns the keys in the alphabetic
-- order of node names. We use a temporary ordered key table that is stored in the -- order of node names. We use a temporary ordered key table that is stored in the
@@ -132,7 +140,6 @@ end
-- props must contain id, and cannot contain children or parentId -- props must contain id, and cannot contain children or parentId
-- other than those three, it can hold anything -- other than those three, it can hold anything
function Tree:addNode(parent, props) function Tree:addNode(parent, props)
Timer.start("Tree:addNode")
assert(props.id, "props must contain id") assert(props.id, "props must contain id")
parent = parent or "ROOT" parent = parent or "ROOT"
@@ -143,7 +150,6 @@ function Tree:addNode(parent, props)
for k, v in props do for k, v in props do
node[k] = v node[k] = v
end end
Timer.stop()
return node return node
end end
@@ -154,25 +160,25 @@ function Tree:addNode(parent, props)
local parentNode = self:getNode(parent) local parentNode = self:getNode(parent)
if not parentNode then if not parentNode then
Log.warn("Failed to create node since parent doesnt exist: {}, {}", parent, props) Log.warn("Failed to create node since parent doesnt exist: {}, {}", parent, props)
Timer.stop()
return return
end end
parentNode.children[node.id] = node parentNode.children[node.id] = node
self.idToNode[node.id] = node self.idToNode[node.id] = node
Timer.stop()
return node return node
end end
-- Given a list of ancestor ids in descending order, builds the nodes for them -- Given a list of ancestor ids in descending order, builds the nodes for them
-- using the patch and instanceMap info -- using the patch and instanceMap info
function Tree:buildAncestryNodes(previousId: string?, ancestryIds: { string }, patch, instanceMap) function Tree:buildAncestryNodes(previousId: string?, ancestryIds: { string }, patch, instanceMap)
Timer.start("Tree:buildAncestryNodes") local clock = os.clock()
-- Build nodes for ancestry by going up the tree -- Build nodes for ancestry by going up the tree
previousId = previousId or "ROOT" previousId = previousId or "ROOT"
for _, ancestorId in ancestryIds do for _, ancestorId in ancestryIds do
clock = yieldIfNeeded(clock)
local value = instanceMap.fromIds[ancestorId] or patch.added[ancestorId] local value = instanceMap.fromIds[ancestorId] or patch.added[ancestorId]
if not value then if not value then
Log.warn("Failed to find ancestor object for " .. ancestorId) Log.warn("Failed to find ancestor object for " .. ancestorId)
@@ -186,8 +192,6 @@ function Tree:buildAncestryNodes(previousId: string?, ancestryIds: { string }, p
}) })
previousId = ancestorId previousId = ancestorId
end end
Timer.stop()
end end
local PatchTree = {} local PatchTree = {}
@@ -196,12 +200,16 @@ local PatchTree = {}
-- uses changeListHeaders in node.changeList -- uses changeListHeaders in node.changeList
function PatchTree.build(patch, instanceMap, changeListHeaders) function PatchTree.build(patch, instanceMap, changeListHeaders)
Timer.start("PatchTree.build") Timer.start("PatchTree.build")
local clock = os.clock()
local tree = Tree.new() local tree = Tree.new()
local knownAncestors = {} local knownAncestors = {}
Timer.start("patch.updated") Timer.start("patch.updated")
for _, change in patch.updated do for _, change in patch.updated do
clock = yieldIfNeeded(clock)
local instance = instanceMap.fromIds[change.id] local instance = instanceMap.fromIds[change.id]
if not instance then if not instance then
continue continue
@@ -281,6 +289,8 @@ function PatchTree.build(patch, instanceMap, changeListHeaders)
Timer.start("patch.removed") Timer.start("patch.removed")
for _, idOrInstance in patch.removed do for _, idOrInstance in patch.removed do
clock = yieldIfNeeded(clock)
local instance = if Types.RbxId(idOrInstance) then instanceMap.fromIds[idOrInstance] else idOrInstance local instance = if Types.RbxId(idOrInstance) then instanceMap.fromIds[idOrInstance] else idOrInstance
if not instance then if not instance then
-- If we're viewing a past patch, the instance is already removed -- If we're viewing a past patch, the instance is already removed
@@ -325,6 +335,8 @@ function PatchTree.build(patch, instanceMap, changeListHeaders)
Timer.start("patch.added") Timer.start("patch.added")
for id, change in patch.added do for id, change in patch.added do
clock = yieldIfNeeded(clock)
-- Gather ancestors from existing DOM or future additions -- Gather ancestors from existing DOM or future additions
local ancestryIds = {} local ancestryIds = {}
local parentId = change.Parent local parentId = change.Parent

View File

@@ -48,6 +48,12 @@ local function debugPatch(object)
end) end)
end end
local function attemptReparent(instance, parent)
return pcall(function()
instance.Parent = parent
end)
end
local ServeSession = {} local ServeSession = {}
ServeSession.__index = ServeSession ServeSession.__index = ServeSession
@@ -101,6 +107,7 @@ function ServeSession.new(options)
__connections = connections, __connections = connections,
__precommitCallbacks = {}, __precommitCallbacks = {},
__postcommitCallbacks = {}, __postcommitCallbacks = {},
__updateLoadingText = function() end,
} }
setmetatable(self, ServeSession) setmetatable(self, ServeSession)
@@ -131,6 +138,14 @@ function ServeSession:setConfirmCallback(callback)
self.__userConfirmCallback = callback self.__userConfirmCallback = callback
end end
function ServeSession:setUpdateLoadingTextCallback(callback)
self.__updateLoadingText = callback
end
function ServeSession:setLoadingText(text: string)
self.__updateLoadingText(text)
end
--[=[ --[=[
Hooks a function to run before patch application. Hooks a function to run before patch application.
The provided function is called with the incoming patch and an InstanceMap The provided function is called with the incoming patch and an InstanceMap
@@ -175,11 +190,14 @@ end
function ServeSession:start() function ServeSession:start()
self:__setStatus(Status.Connecting) self:__setStatus(Status.Connecting)
self:setLoadingText("Connecting to server...")
self.__apiContext self.__apiContext
:connect() :connect()
:andThen(function(serverInfo) :andThen(function(serverInfo)
self:setLoadingText("Loading initial data from server...")
return self:__initialSync(serverInfo):andThen(function() return self:__initialSync(serverInfo):andThen(function()
self:setLoadingText("Starting sync loop...")
self:__setStatus(Status.Connected, serverInfo.projectName) self:__setStatus(Status.Connected, serverInfo.projectName)
self:__applyGameAndPlaceId(serverInfo) self:__applyGameAndPlaceId(serverInfo)
@@ -291,18 +309,52 @@ function ServeSession:__replaceInstances(idList)
for id, replacement in replacements do for id, replacement in replacements do
local oldInstance = self.__instanceMap.fromIds[id] local oldInstance = self.__instanceMap.fromIds[id]
if not oldInstance then
-- TODO: Why would this happen?
Log.warn("Instance {} not found in InstanceMap during sync replacement", id)
continue
end
self.__instanceMap:insert(id, replacement) self.__instanceMap:insert(id, replacement)
Log.trace("Swapping Instance {} out via api/models/ endpoint", id) Log.trace("Swapping Instance {} out via api/models/ endpoint", id)
local oldParent = oldInstance.Parent local oldParent = oldInstance.Parent
for _, child in oldInstance:GetChildren() do for _, child in oldInstance:GetChildren() do
child.Parent = replacement -- Some children cannot be reparented, such as a TouchTransmitter
local reparentSuccess, reparentError = attemptReparent(child, replacement)
if not reparentSuccess then
Log.warn(
"Could not reparent child {} of instance {} during sync replacement: {}",
child.Name,
oldInstance.Name,
reparentError
)
end
end end
replacement.Parent = oldParent
-- ChangeHistoryService doesn't like it if an Instance has been -- ChangeHistoryService doesn't like it if an Instance has been
-- Destroyed. So, we have to accept the potential memory hit and -- Destroyed. So, we have to accept the potential memory hit and
-- just set the parent to `nil`. -- just set the parent to `nil`.
oldInstance.Parent = nil local deleteSuccess, deleteError = attemptReparent(oldInstance, nil)
local replaceSuccess, replaceError = attemptReparent(replacement, oldParent)
if not (deleteSuccess and replaceSuccess) then
Log.warn(
"Could not swap instances {} and {} during sync replacement: {}",
oldInstance.Name,
replacement.Name,
(deleteError or "") .. "\n" .. (replaceError or "")
)
-- We need to revert the failed swap to avoid losing the old instance and children.
for _, child in replacement:GetChildren() do
attemptReparent(child, oldInstance)
end
attemptReparent(oldInstance, oldParent)
-- Our replacement should never have existed in the first place, so we can just destroy it.
replacement:Destroy()
continue
end
if selectionMap[oldInstance] then if selectionMap[oldInstance] then
-- This is a bit funky, but it saves the order of Selection -- This is a bit funky, but it saves the order of Selection
@@ -349,18 +401,11 @@ function ServeSession:__applyPatch(patch)
error(unappliedPatch) error(unappliedPatch)
end end
if PatchSet.isEmpty(unappliedPatch) then if Settings:get("enableSyncFallback") and not PatchSet.isEmpty(unappliedPatch) then
if historyRecording then -- Some changes did not apply, let's try replacing them instead
ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit) local addedIdList = PatchSet.addedIdList(unappliedPatch)
end local updatedIdList = PatchSet.updatedIdList(unappliedPatch)
return
end
local addedIdList = PatchSet.addedIdList(unappliedPatch)
local updatedIdList = PatchSet.updatedIdList(unappliedPatch)
local actualUnappliedPatches = PatchSet.newEmpty()
if Settings:get("enableSyncFallback") then
Log.debug("ServeSession:__replaceInstances(unappliedPatch.added)") Log.debug("ServeSession:__replaceInstances(unappliedPatch.added)")
Timer.start("ServeSession:__replaceInstances(unappliedPatch.added)") Timer.start("ServeSession:__replaceInstances(unappliedPatch.added)")
local addSuccess, unappliedAddedRefs = self:__replaceInstances(addedIdList) local addSuccess, unappliedAddedRefs = self:__replaceInstances(addedIdList)
@@ -371,20 +416,18 @@ function ServeSession:__applyPatch(patch)
local updateSuccess, unappliedUpdateRefs = self:__replaceInstances(updatedIdList) local updateSuccess, unappliedUpdateRefs = self:__replaceInstances(updatedIdList)
Timer.stop() Timer.stop()
-- Update the unapplied patch to reflect which Instances were replaced successfully
if addSuccess then if addSuccess then
table.clear(unappliedPatch.added) table.clear(unappliedPatch.added)
PatchSet.assign(actualUnappliedPatches, unappliedAddedRefs) PatchSet.assign(unappliedPatch, unappliedAddedRefs)
end end
if updateSuccess then if updateSuccess then
table.clear(unappliedPatch.updated) table.clear(unappliedPatch.updated)
PatchSet.assign(actualUnappliedPatches, unappliedUpdateRefs) PatchSet.assign(unappliedPatch, unappliedUpdateRefs)
end end
else
Log.debug("Skipping ServeSession:__replaceInstances because of setting")
end end
PatchSet.assign(actualUnappliedPatches, unappliedPatch)
if not PatchSet.isEmpty(actualUnappliedPatches) then if not PatchSet.isEmpty(unappliedPatch) then
Log.debug( Log.debug(
"Could not apply all changes requested by the Rojo server:\n{}", "Could not apply all changes requested by the Rojo server:\n{}",
PatchSet.humanSummary(self.__instanceMap, unappliedPatch) PatchSet.humanSummary(self.__instanceMap, unappliedPatch)
@@ -396,7 +439,7 @@ function ServeSession:__applyPatch(patch)
-- guaranteed to be called after the commit -- guaranteed to be called after the commit
for _, callback in self.__postcommitCallbacks do for _, callback in self.__postcommitCallbacks do
task.spawn(function() task.spawn(function()
local success, err = pcall(callback, patch, self.__instanceMap, actualUnappliedPatches) local success, err = pcall(callback, patch, self.__instanceMap, unappliedPatch)
if not success then if not success then
Log.warn("Postcommit hook errored: {}", err) Log.warn("Postcommit hook errored: {}", err)
end end
@@ -418,11 +461,13 @@ function ServeSession:__initialSync(serverInfo)
-- For any instances that line up with the Rojo server's view, start -- For any instances that line up with the Rojo server's view, start
-- tracking them in the reconciler. -- tracking them in the reconciler.
Log.trace("Matching existing Roblox instances to Rojo IDs") Log.trace("Matching existing Roblox instances to Rojo IDs")
self:setLoadingText("Hydrating instance map...")
self.__reconciler:hydrate(readResponseBody.instances, serverInfo.rootInstanceId, game) self.__reconciler:hydrate(readResponseBody.instances, serverInfo.rootInstanceId, game)
-- Calculate the initial patch to apply to the DataModel to catch us -- Calculate the initial patch to apply to the DataModel to catch us
-- up to what Rojo thinks the place should look like. -- up to what Rojo thinks the place should look like.
Log.trace("Computing changes that plugin needs to make to catch up to server...") Log.trace("Computing changes that plugin needs to make to catch up to server...")
self:setLoadingText("Finding differences between server and Studio...")
local success, catchUpPatch = local success, catchUpPatch =
self.__reconciler:diff(readResponseBody.instances, serverInfo.rootInstanceId, game) self.__reconciler:diff(readResponseBody.instances, serverInfo.rootInstanceId, game)

View File

@@ -1,45 +1,49 @@
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use std::str::FromStr; use std::str::FromStr;
use std::{
collections::VecDeque,
path::{Path, PathBuf},
};
use std::{
ffi::OsStr,
io::{self, Write},
};
use anyhow::{bail, format_err}; use anyhow::{bail, format_err};
use clap::Parser; use clap::Parser;
use fs_err as fs; use fs_err as fs;
use fs_err::OpenOptions; use fs_err::OpenOptions;
use memofs::{InMemoryFs, Vfs, VfsSnapshot};
use super::resolve_path; use super::resolve_path;
static MODEL_PROJECT: &str = const GIT_IGNORE_PLACEHOLDER: &str = "gitignore.txt";
include_str!("../../assets/default-model-project/default.project.json");
static MODEL_README: &str = include_str!("../../assets/default-model-project/README.md");
static MODEL_INIT: &str = include_str!("../../assets/default-model-project/src-init.luau");
static MODEL_GIT_IGNORE: &str = include_str!("../../assets/default-model-project/gitignore.txt");
static PLACE_PROJECT: &str = static TEMPLATE_BINCODE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/templates.bincode"));
include_str!("../../assets/default-place-project/default.project.json");
static PLACE_README: &str = include_str!("../../assets/default-place-project/README.md");
static PLACE_GIT_IGNORE: &str = include_str!("../../assets/default-place-project/gitignore.txt");
static PLUGIN_PROJECT: &str =
include_str!("../../assets/default-plugin-project/default.project.json");
static PLUGIN_README: &str = include_str!("../../assets/default-plugin-project/README.md");
static PLUGIN_GIT_IGNORE: &str = include_str!("../../assets/default-plugin-project/gitignore.txt");
/// Initializes a new Rojo project. /// Initializes a new Rojo project.
///
/// By default, this will attempt to initialize a 'git' repository in the
/// project directory if `git` is installed. To avoid this, pass `--skip-git`.
#[derive(Debug, Parser)] #[derive(Debug, Parser)]
pub struct InitCommand { pub struct InitCommand {
/// Path to the place to create the project. Defaults to the current directory. /// Path to the place to create the project. Defaults to the current directory.
#[clap(default_value = "")] #[clap(default_value = "")]
pub path: PathBuf, pub path: PathBuf,
/// The kind of project to create, 'place', 'plugin', or 'model'. Defaults to place. /// The kind of project to create, 'place', 'plugin', or 'model'.
#[clap(long, default_value = "place")] #[clap(long, default_value = "place")]
pub kind: InitKind, pub kind: InitKind,
/// Skips the initialization of a git repository.
#[clap(long)]
pub skip_git: bool,
} }
impl InitCommand { impl InitCommand {
pub fn run(self) -> anyhow::Result<()> { pub fn run(self) -> anyhow::Result<()> {
let template = self.kind.template();
let base_path = resolve_path(&self.path); let base_path = resolve_path(&self.path);
fs::create_dir_all(&base_path)?; fs::create_dir_all(&base_path)?;
@@ -53,10 +57,51 @@ impl InitCommand {
name: project_name.to_owned(), name: project_name.to_owned(),
}; };
match self.kind { println!(
InitKind::Place => init_place(&base_path, project_params)?, "Creating new {:?} project '{}'",
InitKind::Model => init_model(&base_path, project_params)?, self.kind, project_params.name
InitKind::Plugin => init_plugin(&base_path, project_params)?, );
let vfs = Vfs::new(template);
vfs.set_watch_enabled(false);
let mut queue = VecDeque::with_capacity(8);
for entry in vfs.read_dir("")? {
queue.push_back(entry?.path().to_path_buf())
}
while let Some(mut path) = queue.pop_front() {
let metadata = vfs.metadata(&path)?;
if metadata.is_dir() {
fs_err::create_dir(base_path.join(&path))?;
for entry in vfs.read_dir(&path)? {
queue.push_back(entry?.path().to_path_buf());
}
} else {
let content = vfs.read_to_string_lf_normalized(&path)?;
if let Some(file_stem) = path.file_name().and_then(OsStr::to_str) {
if file_stem == GIT_IGNORE_PLACEHOLDER && !self.skip_git {
path.set_file_name(".gitignore");
}
}
write_if_not_exists(
&base_path.join(&path),
&project_params.render_template(&content),
)?;
}
}
if !self.skip_git && should_git_init(&base_path) {
log::debug!("Initializing Git repository...");
let status = Command::new("git")
.arg("init")
.current_dir(&base_path)
.status()?;
if !status.success() {
bail!("git init failed: status code {:?}", status.code());
}
} }
println!("Created project successfully."); println!("Created project successfully.");
@@ -78,6 +123,32 @@ pub enum InitKind {
Plugin, Plugin,
} }
impl InitKind {
fn template(&self) -> InMemoryFs {
let template_path = match self {
Self::Place => "place",
Self::Model => "model",
Self::Plugin => "plugin",
};
let snapshot: VfsSnapshot = bincode::deserialize(TEMPLATE_BINCODE)
.expect("Rojo's templates were not properly packed into Rojo's binary");
if let VfsSnapshot::Dir { mut children } = snapshot {
if let Some(template) = children.remove(template_path) {
let mut fs = InMemoryFs::new();
fs.load_snapshot("", template)
.expect("loading a template in memory should never fail");
fs
} else {
panic!("template for project type {:?} is missing", self)
}
} else {
panic!("Rojo's templates were packed as a file instead of a directory")
}
}
}
impl FromStr for InitKind { impl FromStr for InitKind {
type Err = anyhow::Error; type Err = anyhow::Error;
@@ -94,92 +165,6 @@ impl FromStr for InitKind {
} }
} }
fn init_place(base_path: &Path, project_params: ProjectParams) -> anyhow::Result<()> {
println!("Creating new place project '{}'", project_params.name);
let project_file = project_params.render_template(PLACE_PROJECT);
try_create_project(base_path, &project_file)?;
let readme = project_params.render_template(PLACE_README);
write_if_not_exists(&base_path.join("README.md"), &readme)?;
let src = base_path.join("src");
fs::create_dir_all(&src)?;
let src_shared = src.join("shared");
fs::create_dir_all(src.join(&src_shared))?;
let src_server = src.join("server");
fs::create_dir_all(src.join(&src_server))?;
let src_client = src.join("client");
fs::create_dir_all(src.join(&src_client))?;
write_if_not_exists(
&src_shared.join("Hello.luau"),
"return function()\n\tprint(\"Hello, world!\")\nend",
)?;
write_if_not_exists(
&src_server.join("init.server.luau"),
"print(\"Hello world, from server!\")",
)?;
write_if_not_exists(
&src_client.join("init.client.luau"),
"print(\"Hello world, from client!\")",
)?;
let git_ignore = project_params.render_template(PLACE_GIT_IGNORE);
try_git_init(base_path, &git_ignore)?;
Ok(())
}
fn init_model(base_path: &Path, project_params: ProjectParams) -> anyhow::Result<()> {
println!("Creating new model project '{}'", project_params.name);
let project_file = project_params.render_template(MODEL_PROJECT);
try_create_project(base_path, &project_file)?;
let readme = project_params.render_template(MODEL_README);
write_if_not_exists(&base_path.join("README.md"), &readme)?;
let src = base_path.join("src");
fs::create_dir_all(&src)?;
let init = project_params.render_template(MODEL_INIT);
write_if_not_exists(&src.join("init.luau"), &init)?;
let git_ignore = project_params.render_template(MODEL_GIT_IGNORE);
try_git_init(base_path, &git_ignore)?;
Ok(())
}
fn init_plugin(base_path: &Path, project_params: ProjectParams) -> anyhow::Result<()> {
println!("Creating new plugin project '{}'", project_params.name);
let project_file = project_params.render_template(PLUGIN_PROJECT);
try_create_project(base_path, &project_file)?;
let readme = project_params.render_template(PLUGIN_README);
write_if_not_exists(&base_path.join("README.md"), &readme)?;
let src = base_path.join("src");
fs::create_dir_all(&src)?;
write_if_not_exists(
&src.join("init.server.luau"),
"print(\"Hello world, from plugin!\")\n",
)?;
let git_ignore = project_params.render_template(PLUGIN_GIT_IGNORE);
try_git_init(base_path, &git_ignore)?;
Ok(())
}
/// Contains parameters used in templates to create a project. /// Contains parameters used in templates to create a project.
struct ProjectParams { struct ProjectParams {
name: String, name: String,
@@ -194,23 +179,6 @@ impl ProjectParams {
} }
} }
/// Attempt to initialize a Git repository if necessary, and create .gitignore.
fn try_git_init(path: &Path, git_ignore: &str) -> Result<(), anyhow::Error> {
if should_git_init(path) {
log::debug!("Initializing Git repository...");
let status = Command::new("git").arg("init").current_dir(path).status()?;
if !status.success() {
bail!("git init failed: status code {:?}", status.code());
}
}
write_if_not_exists(&path.join(".gitignore"), git_ignore)?;
Ok(())
}
/// Tells whether we should initialize a Git repository inside the given path. /// Tells whether we should initialize a Git repository inside the given path.
/// ///
/// Will return false if the user doesn't have Git installed or if the path is /// Will return false if the user doesn't have Git installed or if the path is
@@ -251,29 +219,3 @@ fn write_if_not_exists(path: &Path, contents: &str) -> Result<(), anyhow::Error>
Ok(()) Ok(())
} }
/// Try to create a project file and fail if it already exists.
fn try_create_project(base_path: &Path, contents: &str) -> Result<(), anyhow::Error> {
let project_path = base_path.join("default.project.json");
let file_res = OpenOptions::new()
.write(true)
.create_new(true)
.open(&project_path);
let mut file = match file_res {
Ok(file) => file,
Err(err) => {
return match err.kind() {
io::ErrorKind::AlreadyExists => {
bail!("Project file already exists: {}", project_path.display())
}
_ => Err(err.into()),
}
}
};
file.write_all(contents.as_bytes())?;
Ok(())
}

View File

@@ -1,7 +1,6 @@
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr;
use anyhow::{bail, format_err, Context}; use anyhow::{bail, Context};
use clap::Parser; use clap::Parser;
use memofs::Vfs; use memofs::Vfs;
use reqwest::{ use reqwest::{
@@ -91,32 +90,6 @@ impl UploadCommand {
} }
} }
/// The kind of asset to upload to the website. Affects what endpoints Rojo uses
/// and changes how the asset is built.
#[derive(Debug, Clone, Copy)]
enum UploadKind {
/// Upload to a place.
Place,
/// Upload to a model-like asset, like a Model, Plugin, or Package.
Model,
}
impl FromStr for UploadKind {
type Err = anyhow::Error;
fn from_str(source: &str) -> Result<Self, Self::Err> {
match source {
"place" => Ok(UploadKind::Place),
"model" => Ok(UploadKind::Model),
attempted => Err(format_err!(
"Invalid upload kind '{}'. Valid kinds are: place, model",
attempted
)),
}
}
}
fn do_upload(buffer: Vec<u8>, asset_id: u64, cookie: &str) -> anyhow::Result<()> { fn do_upload(buffer: Vec<u8>, asset_id: u64, cookie: &str) -> anyhow::Result<()> {
let url = format!( let url = format!(
"https://data.roblox.com/Data/Upload.ashx?assetid={}", "https://data.roblox.com/Data/Upload.ashx?assetid={}",

View File

@@ -43,8 +43,8 @@ impl Serialize for Glob {
impl<'de> Deserialize<'de> for Glob { impl<'de> Deserialize<'de> for Glob {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let glob = <&str as Deserialize>::deserialize(deserializer)?; let glob = String::deserialize(deserializer)?;
Glob::new(glob).map_err(D::Error::custom) Glob::new(&glob).map_err(D::Error::custom)
} }
} }

313
src/json.rs Normal file
View File

@@ -0,0 +1,313 @@
//! Utilities for parsing JSON with comments (JSONC) and deserializing to Rust types.
//!
//! This module provides convenient wrappers around `jsonc_parser` and `serde_json`
//! to reduce boilerplate and improve ergonomics when working with JSONC files.
use anyhow::Context as _;
use serde::de::DeserializeOwned;
/// Parse JSONC text into a `serde_json::Value`.
///
/// This handles the common pattern of calling `jsonc_parser::parse_to_serde_value`
/// and unwrapping the `Option` with a clear error message.
///
/// # Errors
///
/// Returns an error if:
/// - The text is not valid JSONC
/// - The text contains no JSON value
pub fn parse_value(text: &str) -> anyhow::Result<serde_json::Value> {
jsonc_parser::parse_to_serde_value(text, &Default::default())
.context("Failed to parse JSONC")?
.ok_or_else(|| anyhow::anyhow!("File contains no JSON value"))
}
/// Parse JSONC text into a `serde_json::Value` with a custom context message.
///
/// This is useful when you want to provide a specific error message that includes
/// additional information like the file path.
///
/// # Errors
///
/// Returns an error if:
/// - The text is not valid JSONC
/// - The text contains no JSON value
pub fn parse_value_with_context(
text: &str,
context: impl Fn() -> String,
) -> anyhow::Result<serde_json::Value> {
jsonc_parser::parse_to_serde_value(text, &Default::default())
.with_context(|| format!("{}: JSONC parse error", context()))?
.ok_or_else(|| anyhow::anyhow!("{}: File contains no JSON value", context()))
}
/// Parse JSONC text and deserialize it into a specific type.
///
/// This combines parsing JSONC and deserializing into a single operation,
/// eliminating the need to manually chain `parse_to_serde_value` and `from_value`.
///
/// # Errors
///
/// Returns an error if:
/// - The text is not valid JSONC
/// - The text contains no JSON value
/// - The value cannot be deserialized into type `T`
pub fn from_str<T: DeserializeOwned>(text: &str) -> anyhow::Result<T> {
let value = parse_value(text)?;
serde_json::from_value(value).context("Failed to deserialize JSON")
}
/// Parse JSONC text and deserialize it into a specific type with a custom context message.
///
/// This is useful when you want to provide a specific error message that includes
/// additional information like the file path.
///
/// # Errors
///
/// Returns an error if:
/// - The text is not valid JSONC
/// - The text contains no JSON value
/// - The value cannot be deserialized into type `T`
pub fn from_str_with_context<T: DeserializeOwned>(
text: &str,
context: impl Fn() -> String,
) -> anyhow::Result<T> {
let value = parse_value_with_context(text, &context)?;
serde_json::from_value(value).with_context(|| format!("{}: Invalid JSON structure", context()))
}
/// Parse JSONC bytes into a `serde_json::Value`, labeling any error with a
/// caller-supplied context string.
///
/// Performs UTF-8 validation followed by JSONC parsing.
///
/// # Errors
///
/// Returns an error if:
/// - The bytes are not valid UTF-8
/// - The text is not valid JSONC
/// - The text contains no JSON value
pub fn parse_value_from_slice_with_context(
    slice: &[u8],
    context: impl Fn() -> String,
) -> anyhow::Result<serde_json::Value> {
    // Validate the encoding first so byte-level problems get a dedicated
    // message instead of surfacing as a confusing parse failure.
    let as_text = std::str::from_utf8(slice)
        .with_context(|| format!("{}: File is not valid UTF-8", context()))?;
    parse_value_with_context(as_text, context)
}
/// Parse JSONC bytes and deserialize them into type `T`.
///
/// Performs UTF-8 validation, JSONC parsing, and deserialization in one step.
///
/// # Errors
///
/// Returns an error if:
/// - The bytes are not valid UTF-8
/// - The text is not valid JSONC
/// - The text contains no JSON value
/// - The value cannot be deserialized into type `T`
pub fn from_slice<T: DeserializeOwned>(slice: &[u8]) -> anyhow::Result<T> {
    std::str::from_utf8(slice)
        .context("File is not valid UTF-8")
        .and_then(|text| from_str(text))
}
/// Parse JSONC bytes and deserialize them into type `T`, labeling any error
/// with a caller-supplied context string.
///
/// Performs UTF-8 validation, JSONC parsing, and deserialization in one step.
///
/// # Errors
///
/// Returns an error if:
/// - The bytes are not valid UTF-8
/// - The text is not valid JSONC
/// - The text contains no JSON value
/// - The value cannot be deserialized into type `T`
pub fn from_slice_with_context<T: DeserializeOwned>(
    slice: &[u8],
    context: impl Fn() -> String,
) -> anyhow::Result<T> {
    // Encoding errors are reported separately from parse/deserialize errors.
    let as_text = std::str::from_utf8(slice)
        .with_context(|| format!("{}: File is not valid UTF-8", context()))?;
    from_str_with_context(as_text, context)
}
// Unit tests for the JSONC helpers. Each helper is exercised for the happy
// path, for JSONC-specific syntax (comments, trailing commas), and for its
// error-message contract — the error tests match on substrings of the
// rendered error, so those message fragments are part of the module's API.
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;

    #[test]
    fn test_parse_value() {
        // Plain JSON parses unchanged.
        let value = parse_value(r#"{"foo": "bar"}"#).unwrap();
        assert_eq!(value["foo"], "bar");
    }

    #[test]
    fn test_parse_value_with_comments() {
        // JSONC extension: line comments are tolerated and stripped.
        let value = parse_value(
            r#"{
            // This is a comment
            "foo": "bar" // Inline comment
        }"#,
        )
        .unwrap();
        assert_eq!(value["foo"], "bar");
    }

    #[test]
    fn test_parse_value_with_trailing_comma() {
        // JSONC extension: trailing commas in objects are accepted.
        let value = parse_value(
            r#"{
            "foo": "bar",
            "baz": 123,
        }"#,
        )
        .unwrap();
        assert_eq!(value["foo"], "bar");
        assert_eq!(value["baz"], 123);
    }

    #[test]
    fn test_parse_value_empty() {
        // Empty input parses "successfully" but yields no value, which the
        // helper converts into an error.
        let err = parse_value("").unwrap_err();
        assert!(err.to_string().contains("no JSON value"));
    }

    #[test]
    fn test_parse_value_invalid() {
        // Malformed input surfaces as a parse error.
        let err = parse_value("{invalid}").unwrap_err();
        assert!(err.to_string().contains("parse"));
    }

    #[test]
    fn test_parse_value_with_context() {
        // The caller-supplied context (here a fake path) must appear in the
        // rendered error alongside the parse failure.
        let err = parse_value_with_context("{invalid}", || "test.json".to_string()).unwrap_err();
        assert!(err.to_string().contains("test.json"));
        assert!(err.to_string().contains("parse"));
    }

    // Minimal deserialization target used by the from_str/from_slice tests.
    #[derive(Debug, Deserialize, PartialEq)]
    struct TestStruct {
        foo: String,
        bar: i32,
    }

    #[test]
    fn test_from_str() {
        let result: TestStruct = from_str(r#"{"foo": "hello", "bar": 42}"#).unwrap();
        assert_eq!(
            result,
            TestStruct {
                foo: "hello".to_string(),
                bar: 42
            }
        );
    }

    #[test]
    fn test_from_str_with_comments() {
        // Comments and trailing commas survive all the way through
        // deserialization, not just raw value parsing.
        let result: TestStruct = from_str(
            r#"{
            // Comment
            "foo": "hello",
            "bar": 42, // Trailing comma is fine
        }"#,
        )
        .unwrap();
        assert_eq!(
            result,
            TestStruct {
                foo: "hello".to_string(),
                bar: 42
            }
        );
    }

    #[test]
    fn test_from_str_invalid_type() {
        // Valid JSON that doesn't match the target type ("bar" missing)
        // reports a deserialization error rather than a parse error.
        let err = from_str::<TestStruct>(r#"{"foo": "hello"}"#).unwrap_err();
        assert!(err.to_string().contains("deserialize"));
    }

    #[test]
    fn test_from_str_with_context() {
        let err = from_str_with_context::<TestStruct>(r#"{"foo": "hello"}"#, || {
            "config.json".to_string()
        })
        .unwrap_err();
        assert!(err.to_string().contains("config.json"));
        assert!(err.to_string().contains("Invalid JSON structure"));
    }

    #[test]
    fn test_parse_value_from_slice_with_context() {
        let err = parse_value_from_slice_with_context(b"{invalid}", || "test.json".to_string())
            .unwrap_err();
        assert!(err.to_string().contains("test.json"));
        assert!(err.to_string().contains("parse"));
    }

    #[test]
    fn test_parse_value_from_slice_with_context_invalid_utf8() {
        // 0xFF bytes are never valid UTF-8, so this exercises the encoding
        // branch rather than the parser.
        let err = parse_value_from_slice_with_context(&[0xFF, 0xFF], || "test.json".to_string())
            .unwrap_err();
        assert!(err.to_string().contains("test.json"));
        assert!(err.to_string().contains("UTF-8"));
    }

    #[test]
    fn test_from_slice() {
        let result: TestStruct = from_slice(br#"{"foo": "hello", "bar": 42}"#).unwrap();
        assert_eq!(
            result,
            TestStruct {
                foo: "hello".to_string(),
                bar: 42
            }
        );
    }

    #[test]
    fn test_from_slice_with_comments() {
        let result: TestStruct = from_slice(
            br#"{
            // Comment
            "foo": "hello",
            "bar": 42, // Trailing comma is fine
        }"#,
        )
        .unwrap();
        assert_eq!(
            result,
            TestStruct {
                foo: "hello".to_string(),
                bar: 42
            }
        );
    }

    #[test]
    fn test_from_slice_invalid_utf8() {
        let err = from_slice::<TestStruct>(&[0xFF, 0xFF]).unwrap_err();
        assert!(err.to_string().contains("UTF-8"));
    }

    #[test]
    fn test_from_slice_with_context() {
        let err = from_slice_with_context::<TestStruct>(br#"{"foo": "hello"}"#, || {
            "config.json".to_string()
        })
        .unwrap_err();
        assert!(err.to_string().contains("config.json"));
        assert!(err.to_string().contains("Invalid JSON structure"));
    }

    #[test]
    fn test_from_slice_with_context_invalid_utf8() {
        let err =
            from_slice_with_context::<TestStruct>(&[0xFF, 0xFF], || "config.json".to_string())
                .unwrap_err();
        assert!(err.to_string().contains("config.json"));
        assert!(err.to_string().contains("UTF-8"));
    }
}

View File

@@ -10,6 +10,7 @@ mod tree_view;
mod auth_cookie; mod auth_cookie;
mod change_processor; mod change_processor;
mod glob; mod glob;
mod json;
mod lua_ast; mod lua_ast;
mod message_queue; mod message_queue;
mod multimap; mod multimap;

View File

@@ -11,7 +11,7 @@ use rbx_dom_weak::{Ustr, UstrMap};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use thiserror::Error; use thiserror::Error;
use crate::{glob::Glob, resolution::UnresolvedValue, snapshot::SyncRule}; use crate::{glob::Glob, json, resolution::UnresolvedValue, snapshot::SyncRule};
static PROJECT_FILENAME: &str = "default.project.json"; static PROJECT_FILENAME: &str = "default.project.json";
@@ -214,8 +214,11 @@ impl Project {
project_file_location: PathBuf, project_file_location: PathBuf,
fallback_name: Option<&str>, fallback_name: Option<&str>,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let mut project: Self = serde_json::from_slice(contents).map_err(|source| Error::Json { let mut project: Self = json::from_slice(contents).map_err(|e| Error::Json {
source, source: serde_json::Error::io(std::io::Error::new(
std::io::ErrorKind::InvalidData,
e.to_string(),
)),
path: project_file_location.clone(), path: project_file_location.clone(),
})?; })?;
project.file_location = project_file_location; project.file_location = project_file_location;
@@ -399,13 +402,13 @@ mod test {
#[test] #[test]
fn path_node_required() { fn path_node_required() {
let path_node: PathNode = serde_json::from_str(r#""src""#).unwrap(); let path_node: PathNode = json::from_str(r#""src""#).unwrap();
assert_eq!(path_node, PathNode::Required(PathBuf::from("src"))); assert_eq!(path_node, PathNode::Required(PathBuf::from("src")));
} }
#[test] #[test]
fn path_node_optional() { fn path_node_optional() {
let path_node: PathNode = serde_json::from_str(r#"{ "optional": "src" }"#).unwrap(); let path_node: PathNode = json::from_str(r#"{ "optional": "src" }"#).unwrap();
assert_eq!( assert_eq!(
path_node, path_node,
PathNode::Optional(OptionalPathNode::new(PathBuf::from("src"))) PathNode::Optional(OptionalPathNode::new(PathBuf::from("src")))
@@ -414,7 +417,7 @@ mod test {
#[test] #[test]
fn project_node_required() { fn project_node_required() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$path": "src" "$path": "src"
}"#, }"#,
@@ -429,7 +432,7 @@ mod test {
#[test] #[test]
fn project_node_optional() { fn project_node_optional() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$path": { "optional": "src" } "$path": { "optional": "src" }
}"#, }"#,
@@ -446,7 +449,7 @@ mod test {
#[test] #[test]
fn project_node_none() { fn project_node_none() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$className": "Folder" "$className": "Folder"
}"#, }"#,
@@ -458,7 +461,7 @@ mod test {
#[test] #[test]
fn project_node_optional_serialize_absolute() { fn project_node_optional_serialize_absolute() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$path": { "optional": "..\\src" } "$path": { "optional": "..\\src" }
}"#, }"#,
@@ -471,7 +474,7 @@ mod test {
#[test] #[test]
fn project_node_optional_serialize_absolute_no_change() { fn project_node_optional_serialize_absolute_no_change() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$path": { "optional": "../src" } "$path": { "optional": "../src" }
}"#, }"#,
@@ -484,7 +487,7 @@ mod test {
#[test] #[test]
fn project_node_optional_serialize_optional() { fn project_node_optional_serialize_optional() {
let project_node: ProjectNode = serde_json::from_str( let project_node: ProjectNode = json::from_str(
r#"{ r#"{
"$path": "..\\src" "$path": "..\\src"
}"#, }"#,
@@ -494,4 +497,57 @@ mod test {
let serialized = serde_json::to_string(&project_node).unwrap(); let serialized = serde_json::to_string(&project_node).unwrap();
assert_eq!(serialized, r#"{"$path":"../src"}"#); assert_eq!(serialized, r#"{"$path":"../src"}"#);
} }
#[test]
fn project_with_jsonc_features() {
// Test that JSONC features (comments and trailing commas) are properly handled
let project_json = r#"{
// This is a single-line comment
"name": "TestProject",
/* This is a
multi-line comment */
"tree": {
"$path": "src", // Comment after value
},
"servePort": 34567,
"emitLegacyScripts": false,
// Test glob parsing with comments
"globIgnorePaths": [
"**/*.spec.lua", // Ignore test files
"**/*.test.lua",
],
"syncRules": [
{
"pattern": "*.data.json",
"use": "json", // Trailing comma in object
},
{
"pattern": "*.module.lua",
"use": "moduleScript",
}, // Trailing comma in array
], // Another trailing comma
}"#;
let project = Project::load_from_slice(
project_json.as_bytes(),
PathBuf::from("/test/default.project.json"),
None,
)
.expect("Failed to parse project with JSONC features");
// Verify the parsed values
assert_eq!(project.name, Some("TestProject".to_string()));
assert_eq!(project.serve_port, Some(34567));
assert_eq!(project.emit_legacy_scripts, Some(false));
// Verify glob_ignore_paths were parsed correctly
assert_eq!(project.glob_ignore_paths.len(), 2);
assert!(project.glob_ignore_paths[0].is_match("test/foo.spec.lua"));
assert!(project.glob_ignore_paths[1].is_match("test/bar.test.lua"));
// Verify sync_rules were parsed correctly
assert_eq!(project.sync_rules.len(), 2);
assert!(project.sync_rules[0].include.is_match("data.data.json"));
assert!(project.sync_rules[1].include.is_match("init.module.lua"));
}
} }

View File

@@ -62,7 +62,7 @@ impl AmbiguousValue {
match &property.data_type { match &property.data_type {
DataType::Enum(enum_name) => { DataType::Enum(enum_name) => {
let database = rbx_reflection_database::get(); let database = rbx_reflection_database::get().unwrap();
let enum_descriptor = database.enums.get(enum_name).ok_or_else(|| { let enum_descriptor = database.enums.get(enum_name).ok_or_else(|| {
format_err!("Unknown enum {}. This is a Rojo bug!", enum_name) format_err!("Unknown enum {}. This is a Rojo bug!", enum_name)
@@ -203,7 +203,7 @@ fn find_descriptor(
class_name: &str, class_name: &str,
prop_name: &str, prop_name: &str,
) -> Option<&'static PropertyDescriptor<'static>> { ) -> Option<&'static PropertyDescriptor<'static>> {
let database = rbx_reflection_database::get(); let database = rbx_reflection_database::get().unwrap();
let mut current_class_name = class_name; let mut current_class_name = class_name;
loop { loop {
@@ -248,14 +248,15 @@ fn nonexhaustive_list(values: &[&str]) -> String {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use crate::json;
fn resolve(class: &str, prop: &str, json_value: &str) -> Variant { fn resolve(class: &str, prop: &str, json_value: &str) -> Variant {
let unresolved: UnresolvedValue = serde_json::from_str(json_value).unwrap(); let unresolved: UnresolvedValue = json::from_str(json_value).unwrap();
unresolved.resolve(class, prop).unwrap() unresolved.resolve(class, prop).unwrap()
} }
fn resolve_unambiguous(json_value: &str) -> Variant { fn resolve_unambiguous(json_value: &str) -> Variant {
let unresolved: UnresolvedValue = serde_json::from_str(json_value).unwrap(); let unresolved: UnresolvedValue = json::from_str(json_value).unwrap();
unresolved.resolve_unambiguous().unwrap() unresolved.resolve_unambiguous().unwrap()
} }

View File

@@ -221,7 +221,7 @@ pub enum InstigatingSource {
ProjectNode( ProjectNode(
#[serde(serialize_with = "path_serializer::serialize_absolute")] PathBuf, #[serde(serialize_with = "path_serializer::serialize_absolute")] PathBuf,
String, String,
ProjectNode, Box<ProjectNode>,
Option<String>, Option<String>,
), ),
} }

View File

@@ -73,7 +73,7 @@ impl RojoTree {
self.inner.root_ref() self.inner.root_ref()
} }
pub fn get_instance(&self, id: Ref) -> Option<InstanceWithMeta> { pub fn get_instance(&self, id: Ref) -> Option<InstanceWithMeta<'_>> {
if let Some(instance) = self.inner.get_by_ref(id) { if let Some(instance) = self.inner.get_by_ref(id) {
let metadata = self.metadata_map.get(&id).unwrap(); let metadata = self.metadata_map.get(&id).unwrap();
@@ -83,7 +83,7 @@ impl RojoTree {
} }
} }
pub fn get_instance_mut(&mut self, id: Ref) -> Option<InstanceWithMetaMut> { pub fn get_instance_mut(&mut self, id: Ref) -> Option<InstanceWithMetaMut<'_>> {
if let Some(instance) = self.inner.get_by_ref_mut(id) { if let Some(instance) = self.inner.get_by_ref_mut(id) {
let metadata = self.metadata_map.get_mut(&id).unwrap(); let metadata = self.metadata_map.get_mut(&id).unwrap();

View File

@@ -1,10 +1,10 @@
use std::path::Path; use std::path::Path;
use anyhow::Context;
use memofs::{IoResultExt, Vfs}; use memofs::{IoResultExt, Vfs};
use rbx_dom_weak::ustr; use rbx_dom_weak::ustr;
use crate::{ use crate::{
json,
lua_ast::{Expression, Statement}, lua_ast::{Expression, Statement},
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot}, snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
}; };
@@ -19,8 +19,9 @@ pub fn snapshot_json(
) -> anyhow::Result<Option<InstanceSnapshot>> { ) -> anyhow::Result<Option<InstanceSnapshot>> {
let contents = vfs.read(path)?; let contents = vfs.read(path)?;
let value: serde_json::Value = serde_json::from_slice(&contents) let value = json::parse_value_from_slice_with_context(&contents, || {
.with_context(|| format!("File contains malformed JSON: {}", path.display()))?; format!("File contains malformed JSON: {}", path.display())
})?;
let as_lua = json_to_lua(value).to_string(); let as_lua = json_to_lua(value).to_string();

View File

@@ -9,6 +9,7 @@ use rbx_dom_weak::{
use serde::Deserialize; use serde::Deserialize;
use crate::{ use crate::{
json,
resolution::UnresolvedValue, resolution::UnresolvedValue,
snapshot::{InstanceContext, InstanceSnapshot}, snapshot::{InstanceContext, InstanceSnapshot},
RojoRef, RojoRef,
@@ -28,8 +29,9 @@ pub fn snapshot_json_model(
return Ok(None); return Ok(None);
} }
let mut instance: JsonModel = serde_json::from_str(contents_str) let mut instance: JsonModel = json::from_str_with_context(contents_str, || {
.with_context(|| format!("File is not a valid JSON model: {}", path.display()))?; format!("File is not a valid JSON model: {}", path.display())
})?;
if let Some(top_level_name) = &instance.name { if let Some(top_level_name) = &instance.name {
let new_name = format!("{}.model.json", top_level_name); let new_name = format!("{}.model.json", top_level_name);

View File

@@ -31,6 +31,7 @@ pub fn snapshot_lua(
script_type: ScriptType, script_type: ScriptType,
) -> anyhow::Result<Option<InstanceSnapshot>> { ) -> anyhow::Result<Option<InstanceSnapshot>> {
let run_context_enums = &rbx_reflection_database::get() let run_context_enums = &rbx_reflection_database::get()
.unwrap()
.enums .enums
.get("RunContext") .get("RunContext")
.expect("Unable to get RunContext enums!") .expect("Unable to get RunContext enums!")

View File

@@ -4,7 +4,7 @@ use anyhow::{format_err, Context};
use rbx_dom_weak::{types::Attributes, Ustr, UstrMap}; use rbx_dom_weak::{types::Attributes, Ustr, UstrMap};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{resolution::UnresolvedValue, snapshot::InstanceSnapshot, RojoRef}; use crate::{json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, RojoRef};
/// Represents metadata in a sibling file with the same basename. /// Represents metadata in a sibling file with the same basename.
/// ///
@@ -34,7 +34,7 @@ pub struct AdjacentMetadata {
impl AdjacentMetadata { impl AdjacentMetadata {
pub fn from_slice(slice: &[u8], path: PathBuf) -> anyhow::Result<Self> { pub fn from_slice(slice: &[u8], path: PathBuf) -> anyhow::Result<Self> {
let mut meta: Self = serde_json::from_slice(slice).with_context(|| { let mut meta: Self = json::from_slice_with_context(slice, || {
format!( format!(
"File contained malformed .meta.json data: {}", "File contained malformed .meta.json data: {}",
path.display() path.display()
@@ -131,7 +131,7 @@ pub struct DirectoryMetadata {
impl DirectoryMetadata { impl DirectoryMetadata {
pub fn from_slice(slice: &[u8], path: PathBuf) -> anyhow::Result<Self> { pub fn from_slice(slice: &[u8], path: PathBuf) -> anyhow::Result<Self> {
let mut meta: Self = serde_json::from_slice(slice).with_context(|| { let mut meta: Self = json::from_slice_with_context(slice, || {
format!( format!(
"File contained malformed init.meta.json data: {}", "File contained malformed init.meta.json data: {}",
path.display() path.display()

View File

@@ -289,7 +289,7 @@ pub fn snapshot_project_node(
metadata.instigating_source = Some(InstigatingSource::ProjectNode( metadata.instigating_source = Some(InstigatingSource::ProjectNode(
project_path.to_path_buf(), project_path.to_path_buf(),
instance_name.to_string(), instance_name.to_string(),
node.clone(), Box::new(node.clone()),
parent_class.map(|name| name.to_owned()), parent_class.map(|name| name.to_owned()),
)); ));
@@ -313,7 +313,7 @@ fn infer_class_name(name: &str, parent_class: Option<&str>) -> Option<Ustr> {
// Members of DataModel with names that match known services are // Members of DataModel with names that match known services are
// probably supposed to be those services. // probably supposed to be those services.
let descriptor = rbx_reflection_database::get().classes.get(name)?; let descriptor = rbx_reflection_database::get().unwrap().classes.get(name)?;
if descriptor.tags.contains(&ClassTag::Service) { if descriptor.tags.contains(&ClassTag::Service) {
return Some(ustr(name)); return Some(ustr(name));

View File

@@ -17,6 +17,7 @@ use rbx_dom_weak::{
}; };
use crate::{ use crate::{
json,
serve_session::ServeSession, serve_session::ServeSession,
snapshot::{InstanceWithMeta, PatchSet, PatchUpdate}, snapshot::{InstanceWithMeta, PatchSet, PatchUpdate},
web::{ web::{
@@ -139,7 +140,7 @@ impl ApiService {
let body = body::to_bytes(request.into_body()).await.unwrap(); let body = body::to_bytes(request.into_body()).await.unwrap();
let request: WriteRequest = match serde_json::from_slice(&body) { let request: WriteRequest = match json::from_slice(&body) {
Ok(request) => request, Ok(request) => request,
Err(err) => { Err(err) => {
return json( return json(

View File

@@ -157,14 +157,20 @@ impl TestServeSession {
let url = format!("http://localhost:{}/api/rojo", self.port); let url = format!("http://localhost:{}/api/rojo", self.port);
let body = reqwest::blocking::get(url)?.text()?; let body = reqwest::blocking::get(url)?.text()?;
Ok(serde_json::from_str(&body).expect("Server returned malformed response")) let value = jsonc_parser::parse_to_serde_value(&body, &Default::default())
.expect("Failed to parse JSON")
.expect("No JSON value");
Ok(serde_json::from_value(value).expect("Server returned malformed response"))
} }
pub fn get_api_read(&self, id: Ref) -> Result<ReadResponse, reqwest::Error> { pub fn get_api_read(&self, id: Ref) -> Result<ReadResponse<'_>, reqwest::Error> {
let url = format!("http://localhost:{}/api/read/{}", self.port, id); let url = format!("http://localhost:{}/api/read/{}", self.port, id);
let body = reqwest::blocking::get(url)?.text()?; let body = reqwest::blocking::get(url)?.text()?;
Ok(serde_json::from_str(&body).expect("Server returned malformed response")) let value = jsonc_parser::parse_to_serde_value(&body, &Default::default())
.expect("Failed to parse JSON")
.expect("No JSON value");
Ok(serde_json::from_value(value).expect("Server returned malformed response"))
} }
pub fn get_api_subscribe( pub fn get_api_subscribe(