Use SerializationService as a fallback for when patch application fails (#1030)

This commit is contained in:
Micah
2025-09-21 15:09:20 -07:00
committed by GitHub
parent 59e36491a5
commit 80a381dbb1
26 changed files with 793 additions and 108 deletions

View File

@@ -2,6 +2,12 @@
## Unreleased ## Unreleased
* Added fallback method for when an Instance can't be synced through normal means ([#1030])
This should make it possible to sync `MeshParts` and `Unions`!
The fallback involves deleting and recreating Instances. This will break
properties that reference them that Rojo does not know about, so be wary.
* Add auto-reconnect and improve UX for sync reminders ([#1096]) * Add auto-reconnect and improve UX for sync reminders ([#1096])
* Add support for syncing `yml` and `yaml` files (behaves similar to JSON and TOML) ([#1093]) * Add support for syncing `yml` and `yaml` files (behaves similar to JSON and TOML) ([#1093])
* Fixed colors of Table diff ([#1084]) * Fixed colors of Table diff ([#1084])
@@ -13,6 +19,7 @@
* Added `--absolute` flag to the sourcemap subcommand, which will emit absolute paths instead of relative paths. ([#1092]) * Added `--absolute` flag to the sourcemap subcommand, which will emit absolute paths instead of relative paths. ([#1092])
* Fixed applying `gameId` and `placeId` before initial sync was accepted ([#1104]) * Fixed applying `gameId` and `placeId` before initial sync was accepted ([#1104])
[#1030]: https://github.com/rojo-rbx/rojo/pull/1030
[#1096]: https://github.com/rojo-rbx/rojo/pull/1096 [#1096]: https://github.com/rojo-rbx/rojo/pull/1096
[#1093]: https://github.com/rojo-rbx/rojo/pull/1093 [#1093]: https://github.com/rojo-rbx/rojo/pull/1093
[#1084]: https://github.com/rojo-rbx/rojo/pull/1084 [#1084]: https://github.com/rojo-rbx/rojo/pull/1084

7
Cargo.lock generated
View File

@@ -401,6 +401,12 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "data-encoding"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
[[package]] [[package]]
name = "diff" name = "diff"
version = "0.1.13" version = "0.1.13"
@@ -1870,6 +1876,7 @@ dependencies = [
"criterion", "criterion",
"crossbeam-channel", "crossbeam-channel",
"csv", "csv",
"data-encoding",
"embed-resource", "embed-resource",
"env_logger", "env_logger",
"fs-err", "fs-err",

View File

@@ -94,6 +94,7 @@ uuid = { version = "1.7.0", features = ["v4", "serde"] }
clap = { version = "3.2.25", features = ["derive"] } clap = { version = "3.2.25", features = ["derive"] }
profiling = "1.0.15" profiling = "1.0.15"
yaml-rust2 = "0.10.3" yaml-rust2 = "0.10.3"
data-encoding = "2.8.0"
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winreg = "0.10.1" winreg = "0.10.1"

View File

@@ -22,6 +22,12 @@ impl RedactionMap {
} }
} }
/// Returns the numeric ID that was assigned to the provided value,
/// if one exists.
pub fn get_id_for_value(&self, value: impl ToString) -> Option<usize> {
self.ids.get(&value.to_string()).cloned()
}
pub fn intern(&mut self, id: impl ToString) { pub fn intern(&mut self, id: impl ToString) {
let last_id = &mut self.last_id; let last_id = &mut self.last_id;

View File

@@ -10,6 +10,8 @@ local Version = require(script.Parent.Version)
local validateApiInfo = Types.ifEnabled(Types.ApiInfoResponse) local validateApiInfo = Types.ifEnabled(Types.ApiInfoResponse)
local validateApiRead = Types.ifEnabled(Types.ApiReadResponse) local validateApiRead = Types.ifEnabled(Types.ApiReadResponse)
local validateApiSubscribe = Types.ifEnabled(Types.ApiSubscribeResponse) local validateApiSubscribe = Types.ifEnabled(Types.ApiSubscribeResponse)
local validateApiSerialize = Types.ifEnabled(Types.ApiSerializeResponse)
local validateApiRefPatch = Types.ifEnabled(Types.ApiRefPatchResponse)
local function rejectFailedRequests(response) local function rejectFailedRequests(response)
if response.code >= 400 then if response.code >= 400 then
@@ -252,4 +254,32 @@ function ApiContext:open(id)
end) end)
end end
--[[
	Requests a binary-model serialization of the Instances with the given
	IDs from the server's /api/serialize/ endpoint. Resolves with the
	validated response body, or rejects if the request fails or the
	server's session ID no longer matches ours.
]]
function ApiContext:serialize(ids: { string })
	local idSegment = table.concat(ids, ",")
	local url = string.format("%s/api/serialize/%s", self.__baseUrl, idSegment)

	return Http.get(url)
		:andThen(rejectFailedRequests)
		:andThen(Http.Response.json)
		:andThen(function(body)
			if body.sessionId == self.__sessionId then
				assert(validateApiSerialize(body))
				return body
			end

			return Promise.reject("Server changed ID")
		end)
end
--[[
	Requests a patch of all Ref properties pointing at the given IDs from
	the server's /api/ref-patch/ endpoint. Resolves with the validated
	response body, or rejects if the request fails or the server's session
	ID no longer matches ours.
]]
function ApiContext:refPatch(ids: { string })
	local idSegment = table.concat(ids, ",")
	local url = string.format("%s/api/ref-patch/%s", self.__baseUrl, idSegment)

	return Http.get(url)
		:andThen(rejectFailedRequests)
		:andThen(Http.Response.json)
		:andThen(function(body)
			if body.sessionId == self.__sessionId then
				assert(validateApiRefPatch(body))
				return body
			end

			return Promise.reject("Server changed ID")
		end)
end
return ApiContext return ApiContext

View File

@@ -181,6 +181,14 @@ function SettingsPage:render()
layoutOrder = layoutIncrement(), layoutOrder = layoutIncrement(),
}), }),
EnableSyncFallback = e(Setting, {
id = "enableSyncFallback",
name = "Enable Sync Fallback",
description = "Whether Instances that fail to sync are remade as a fallback. If this is enabled, Instances may be destroyed and remade when syncing.",
transparency = self.props.transparency,
layoutOrder = layoutIncrement(),
}),
CheckForUpdates = e(Setting, { CheckForUpdates = e(Setting, {
id = "checkForUpdates", id = "checkForUpdates",
name = "Check For Updates", name = "Check For Updates",

View File

@@ -595,13 +595,13 @@ function App:startSession()
twoWaySync = Settings:get("twoWaySync"), twoWaySync = Settings:get("twoWaySync"),
}) })
self.cleanupPrecommit = serveSession.__reconciler:hookPrecommit(function(patch, instanceMap) self.cleanupPrecommit = serveSession:hookPrecommit(function(patch, instanceMap)
-- Build new tree for patch -- Build new tree for patch
self:setState({ self:setState({
patchTree = PatchTree.build(patch, instanceMap, { "Property", "Old", "New" }), patchTree = PatchTree.build(patch, instanceMap, { "Property", "Old", "New" }),
}) })
end) end)
self.cleanupPostcommit = serveSession.__reconciler:hookPostcommit(function(patch, instanceMap, unappliedPatch) self.cleanupPostcommit = serveSession:hookPostcommit(function(patch, instanceMap, unappliedPatch)
-- Update tree with unapplied metadata -- Update tree with unapplied metadata
self:setState(function(prevState) self:setState(function(prevState)
return { return {

View File

@@ -282,6 +282,22 @@ function PatchSet.assign(target, ...)
return target return target
end end
--[[
	Returns a frozen list of the IDs of every Instance added by this patch.

	Note: `patchSet.added` is a dictionary keyed by ID, so `#` only measures
	its (empty) array part — the previous `table.create(#patchSet.added)`
	always preallocated zero slots. We cannot presize the list without a
	full extra pass, so a plain table is used instead.
]]
function PatchSet.addedIdList(patchSet): { string }
	local idList = {}
	for id in patchSet.added do
		table.insert(idList, id)
	end
	return table.freeze(idList)
end
--[[
	Returns a frozen list of the IDs referenced by every update entry in
	this patch. `patchSet.updated` is a sequence, so the output preserves
	its order.
]]
function PatchSet.updatedIdList(patchSet): { string }
	local updates = patchSet.updated
	local idList = table.create(#updates)
	for index, update in updates do
		idList[index] = update.id
	end
	return table.freeze(idList)
end
--[[ --[[
Create a list of human-readable statements summarizing the contents of this Create a list of human-readable statements summarizing the contents of this
patch, intended to be displayed to users. patch, intended to be displayed to users.

View File

@@ -5,8 +5,6 @@
Patches can come from the server or be generated by the client. Patches can come from the server or be generated by the client.
]] ]]
local ChangeHistoryService = game:GetService("ChangeHistoryService")
local Packages = script.Parent.Parent.Parent.Packages local Packages = script.Parent.Parent.Parent.Packages
local Log = require(Packages.Log) local Log = require(Packages.Log)
@@ -20,13 +18,6 @@ local reifyInstance, applyDeferredRefs = reify.reifyInstance, reify.applyDeferre
local setProperty = require(script.Parent.setProperty) local setProperty = require(script.Parent.setProperty)
local function applyPatch(instanceMap, patch) local function applyPatch(instanceMap, patch)
local patchTimestamp = DateTime.now():FormatLocalTime("LTS", "en-us")
local historyRecording = ChangeHistoryService:TryBeginRecording("Rojo: Patch " .. patchTimestamp)
if not historyRecording then
-- There can only be one recording at a time
Log.debug("Failed to begin history recording for " .. patchTimestamp .. ". Another recording is in progress.")
end
-- Tracks any portions of the patch that could not be applied to the DOM. -- Tracks any portions of the patch that could not be applied to the DOM.
local unappliedPatch = PatchSet.newEmpty() local unappliedPatch = PatchSet.newEmpty()
@@ -73,9 +64,6 @@ local function applyPatch(instanceMap, patch)
if parentInstance == nil then if parentInstance == nil then
-- This would be peculiar. If you create an instance with no -- This would be peculiar. If you create an instance with no
-- parent, were you supposed to create it at all? -- parent, were you supposed to create it at all?
if historyRecording then
ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit)
end
invariant( invariant(
"Cannot add an instance from a patch that has no parent.\nInstance {} with parent {}.\nState: {:#?}", "Cannot add an instance from a patch that has no parent.\nInstance {} with parent {}.\nState: {:#?}",
id, id,
@@ -244,10 +232,6 @@ local function applyPatch(instanceMap, patch)
end end
end end
if historyRecording then
ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit)
end
applyDeferredRefs(instanceMap, deferredRefs, unappliedPatch) applyDeferredRefs(instanceMap, deferredRefs, unappliedPatch)
return unappliedPatch return unappliedPatch

View File

@@ -5,9 +5,6 @@
local Rojo = script:FindFirstAncestor("Rojo") local Rojo = script:FindFirstAncestor("Rojo")
local Plugin = Rojo.Plugin local Plugin = Rojo.Plugin
local Packages = Rojo.Packages
local Log = require(Packages.Log)
local Timer = require(Plugin.Timer) local Timer = require(Plugin.Timer)
@@ -22,78 +19,17 @@ function Reconciler.new(instanceMap)
local self = { local self = {
-- Tracks all of the instances known by the reconciler by ID. -- Tracks all of the instances known by the reconciler by ID.
__instanceMap = instanceMap, __instanceMap = instanceMap,
__precommitCallbacks = {},
__postcommitCallbacks = {},
} }
return setmetatable(self, Reconciler) return setmetatable(self, Reconciler)
end end
function Reconciler:hookPrecommit(callback: (patch: any, instanceMap: any) -> ()): () -> ()
table.insert(self.__precommitCallbacks, callback)
Log.trace("Added precommit callback: {}", callback)
return function()
-- Remove the callback from the list
for i, cb in self.__precommitCallbacks do
if cb == callback then
table.remove(self.__precommitCallbacks, i)
Log.trace("Removed precommit callback: {}", callback)
break
end
end
end
end
function Reconciler:hookPostcommit(callback: (patch: any, instanceMap: any, unappliedPatch: any) -> ()): () -> ()
table.insert(self.__postcommitCallbacks, callback)
Log.trace("Added postcommit callback: {}", callback)
return function()
-- Remove the callback from the list
for i, cb in self.__postcommitCallbacks do
if cb == callback then
table.remove(self.__postcommitCallbacks, i)
Log.trace("Removed postcommit callback: {}", callback)
break
end
end
end
end
function Reconciler:applyPatch(patch) function Reconciler:applyPatch(patch)
Timer.start("Reconciler:applyPatch") Timer.start("Reconciler:applyPatch")
Timer.start("precommitCallbacks")
-- Precommit callbacks must be serial in order to obey the contract that
-- they execute before commit
for _, callback in self.__precommitCallbacks do
local success, err = pcall(callback, patch, self.__instanceMap)
if not success then
Log.warn("Precommit hook errored: {}", err)
end
end
Timer.stop()
Timer.start("apply")
local unappliedPatch = applyPatch(self.__instanceMap, patch) local unappliedPatch = applyPatch(self.__instanceMap, patch)
Timer.stop()
Timer.start("postcommitCallbacks")
-- Postcommit callbacks can be called with spawn since regardless of firing order, they are
-- guaranteed to be called after the commit
for _, callback in self.__postcommitCallbacks do
task.spawn(function()
local success, err = pcall(callback, patch, self.__instanceMap, unappliedPatch)
if not success then
Log.warn("Postcommit hook errored: {}", err)
end
end)
end
Timer.stop()
Timer.stop() Timer.stop()
return unappliedPatch return unappliedPatch
end end

View File

@@ -1,11 +1,15 @@
local StudioService = game:GetService("StudioService") local StudioService = game:GetService("StudioService")
local RunService = game:GetService("RunService") local RunService = game:GetService("RunService")
local ChangeHistoryService = game:GetService("ChangeHistoryService")
local SerializationService = game:GetService("SerializationService")
local Selection = game:GetService("Selection")
local Packages = script.Parent.Parent.Packages local Packages = script.Parent.Parent.Packages
local Log = require(Packages.Log) local Log = require(Packages.Log)
local Fmt = require(Packages.Fmt) local Fmt = require(Packages.Fmt)
local t = require(Packages.t) local t = require(Packages.t)
local Promise = require(Packages.Promise) local Promise = require(Packages.Promise)
local Timer = require(script.Parent.Timer)
local ChangeBatcher = require(script.Parent.ChangeBatcher) local ChangeBatcher = require(script.Parent.ChangeBatcher)
local encodePatchUpdate = require(script.Parent.ChangeBatcher.encodePatchUpdate) local encodePatchUpdate = require(script.Parent.ChangeBatcher.encodePatchUpdate)
@@ -95,6 +99,8 @@ function ServeSession.new(options)
__changeBatcher = changeBatcher, __changeBatcher = changeBatcher,
__statusChangedCallback = nil, __statusChangedCallback = nil,
__connections = connections, __connections = connections,
__precommitCallbacks = {},
__postcommitCallbacks = {},
} }
setmetatable(self, ServeSession) setmetatable(self, ServeSession)
@@ -125,12 +131,46 @@ function ServeSession:setConfirmCallback(callback)
self.__userConfirmCallback = callback self.__userConfirmCallback = callback
end end
--[=[
Hooks a function to run before patch application.
The provided function is called with the incoming patch and an InstanceMap
as parameters.
]=]
function ServeSession:hookPrecommit(callback) function ServeSession:hookPrecommit(callback)
return self.__reconciler:hookPrecommit(callback) table.insert(self.__precommitCallbacks, callback)
Log.trace("Added precommit callback: {}", callback)
return function()
-- Remove the callback from the list
for i, cb in self.__precommitCallbacks do
if cb == callback then
table.remove(self.__precommitCallbacks, i)
Log.trace("Removed precommit callback: {}", callback)
break
end
end
end
end end
--[=[
Hooks a function to run after patch application.
The provided function is called with the applied patch, the current
InstanceMap, and a PatchSet containing any unapplied changes.
]=]
function ServeSession:hookPostcommit(callback) function ServeSession:hookPostcommit(callback)
return self.__reconciler:hookPostcommit(callback) table.insert(self.__postcommitCallbacks, callback)
Log.trace("Added postcommit callback: {}", callback)
return function()
-- Remove the callback from the list
for i, cb in self.__postcommitCallbacks do
if cb == callback then
table.remove(self.__postcommitCallbacks, i)
Log.trace("Removed postcommit callback: {}", callback)
break
end
end
end
end end
function ServeSession:start() function ServeSession:start()
@@ -206,6 +246,169 @@ function ServeSession:__onActiveScriptChanged(activeScript)
self.__apiContext:open(scriptId) self.__apiContext:open(scriptId)
end end
--[[
	Fallback sync path: asks the server to serialize the Instances with the
	given IDs, deserializes them locally via SerializationService, and swaps
	each new copy in for the old Instance (reparenting children and
	preserving Studio's Selection where possible). Afterwards it applies a
	ref-patch from the server so Ref properties point at the replacements.

	Returns `true, unappliedRefPatch` on success (or for an empty idList).
	NOTE(review): on failure this returns a lone `false` with no second
	value — callers must not use the second return without checking the
	first.
]]
function ServeSession:__replaceInstances(idList)
-- Nothing to replace; report success with an empty leftover patch.
if #idList == 0 then
return true, PatchSet.newEmpty()
end
-- It would be annoying if selection went away, so we try to preserve it.
local selection = Selection:Get()
local selectionMap = {}
for i, instance in selection do
selectionMap[instance] = i
end
-- Fetch the serialized model and unpack it. The server wraps each
-- requested Instance in an ObjectValue named after its ID, so the result
-- is a map of ID -> replacement Instance.
-- TODO: Should we do this in multiple requests so we can more granularly mark failures?
local modelSuccess, replacements = self.__apiContext
:serialize(idList)
:andThen(function(response)
Log.debug("Deserializing results from serialize endpoint")
local objects = SerializationService:DeserializeInstancesAsync(response.modelContents)
if not objects[1] then
return Promise.reject("Serialize endpoint did not deserialize into any Instances")
end
if #objects[1]:GetChildren() ~= #idList then
return Promise.reject("Serialize endpoint did not return the correct number of Instances")
end
local instanceMap = {}
for _, item in objects[1]:GetChildren() do
instanceMap[item.Name] = item.Value
end
return instanceMap
end)
:await()
-- Fetch the Ref-property fixups for the IDs being replaced.
local refSuccess, refPatch = self.__apiContext
:refPatch(idList)
:andThen(function(response)
return response.patch
end)
:await()
-- If either request failed, `replacements`/`refPatch` hold rejection
-- reasons rather than usable values, so bail out entirely.
if not (modelSuccess and refSuccess) then
return false
end
for id, replacement in replacements do
-- NOTE(review): assumes every replaced ID is present in the instance
-- map; `oldInstance` would be nil otherwise and the `.Parent` access
-- below would error — confirm the server only returns known IDs.
local oldInstance = self.__instanceMap.fromIds[id]
self.__instanceMap:insert(id, replacement)
Log.trace("Swapping Instance {} out via api/models/ endpoint", id)
-- Move the old Instance's children onto the replacement before
-- swapping it into the tree.
local oldParent = oldInstance.Parent
for _, child in oldInstance:GetChildren() do
child.Parent = replacement
end
replacement.Parent = oldParent
-- ChangeHistoryService doesn't like it if an Instance has been
-- Destroyed. So, we have to accept the potential memory hit and
-- just set the parent to `nil`.
oldInstance.Parent = nil
if selectionMap[oldInstance] then
-- This is a bit funky, but it saves the order of Selection
-- which might matter for some use cases.
selection[selectionMap[oldInstance]] = replacement
end
end
-- Apply the Ref fixups; only restore Selection once the swap fully
-- succeeded, and surface reconciler bugs loudly.
local patchApplySuccess, unappliedPatch = pcall(self.__reconciler.applyPatch, self.__reconciler, refPatch)
if patchApplySuccess then
Selection:Set(selection)
return true, unappliedPatch
else
error(unappliedPatch)
end
end
--[[
	Applies a patch from the server to the open place, wrapping the work in
	a ChangeHistoryService recording and running registered precommit and
	postcommit callbacks around it.

	If the reconciler leaves part of the patch unapplied and the
	`enableSyncFallback` setting is on, the affected Instances are replaced
	wholesale via __replaceInstances (delete-and-recreate).
]]
function ServeSession:__applyPatch(patch)
	local patchTimestamp = DateTime.now():FormatLocalTime("LTS", "en-us")

	local historyRecording = ChangeHistoryService:TryBeginRecording("Rojo: Patch " .. patchTimestamp)
	if not historyRecording then
		-- There can only be one recording at a time
		Log.debug("Failed to begin history recording for " .. patchTimestamp .. ". Another recording is in progress.")
	end

	Timer.start("precommitCallbacks")
	-- Precommit callbacks must be serial in order to obey the contract that
	-- they execute before commit
	for _, callback in self.__precommitCallbacks do
		local success, err = pcall(callback, patch, self.__instanceMap)
		if not success then
			Log.warn("Precommit hook errored: {}", err)
		end
	end
	Timer.stop()

	local patchApplySuccess, unappliedPatch = pcall(self.__reconciler.applyPatch, self.__reconciler, patch)
	if not patchApplySuccess then
		if historyRecording then
			ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit)
		end
		-- This might make a weird stack trace but the only way applyPatch can
		-- fail is if a bug occurs so it's probably fine.
		error(unappliedPatch)
	end

	-- Everything applied cleanly; no fallback or summary needed.
	if PatchSet.isEmpty(unappliedPatch) then
		if historyRecording then
			ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit)
		end
		return
	end

	local addedIdList = PatchSet.addedIdList(unappliedPatch)
	local updatedIdList = PatchSet.updatedIdList(unappliedPatch)
	-- Accumulates whatever still cannot be applied after the fallback runs.
	local actualUnappliedPatches = PatchSet.newEmpty()

	if Settings:get("enableSyncFallback") then
		Log.debug("ServeSession:__replaceInstances(unappliedPatch.added)")
		Timer.start("ServeSession:__replaceInstances(unappliedPatch.added)")
		local addSuccess, unappliedAddedRefs = self:__replaceInstances(addedIdList)
		Timer.stop()

		Log.debug("ServeSession:__replaceInstances(unappliedPatch.updated)")
		Timer.start("ServeSession:__replaceInstances(unappliedPatch.updated)")
		local updateSuccess, unappliedUpdateRefs = self:__replaceInstances(updatedIdList)
		Timer.stop()

		-- On success the fallback handled that section of the patch; keep
		-- only the leftover ref-patch entries. On failure the section stays
		-- in unappliedPatch and is merged below.
		if addSuccess then
			table.clear(unappliedPatch.added)
			PatchSet.assign(actualUnappliedPatches, unappliedAddedRefs)
		end
		if updateSuccess then
			table.clear(unappliedPatch.updated)
			PatchSet.assign(actualUnappliedPatches, unappliedUpdateRefs)
		end
	else
		Log.debug("Skipping ServeSession:__replaceInstances because of setting")
	end

	PatchSet.assign(actualUnappliedPatches, unappliedPatch)
	if not PatchSet.isEmpty(actualUnappliedPatches) then
		Log.debug(
			"Could not apply all changes requested by the Rojo server:\n{}",
			-- BUGFIX: summarize the merged leftover set rather than the raw
			-- reconciler output, which misses fallback ref-patch leftovers
			-- and may still contain entries the fallback already replaced.
			PatchSet.humanSummary(self.__instanceMap, actualUnappliedPatches)
		)
	end

	Timer.start("postcommitCallbacks")
	-- Postcommit callbacks can be called with spawn since regardless of firing order, they are
	-- guaranteed to be called after the commit
	for _, callback in self.__postcommitCallbacks do
		task.spawn(function()
			local success, err = pcall(callback, patch, self.__instanceMap, actualUnappliedPatches)
			if not success then
				Log.warn("Postcommit hook errored: {}", err)
			end
		end)
	end
	Timer.stop()

	if historyRecording then
		ChangeHistoryService:FinishRecording(historyRecording, Enum.FinishRecordingOperation.Commit)
	end
end
function ServeSession:__initialSync(serverInfo) function ServeSession:__initialSync(serverInfo)
return self.__apiContext:read({ serverInfo.rootInstanceId }):andThen(function(readResponseBody) return self.__apiContext:read({ serverInfo.rootInstanceId }):andThen(function(readResponseBody)
-- Tell the API Context that we're up-to-date with the version of -- Tell the API Context that we're up-to-date with the version of
@@ -280,15 +483,7 @@ function ServeSession:__initialSync(serverInfo)
return self.__apiContext:write(inversePatch) return self.__apiContext:write(inversePatch)
elseif userDecision == "Accept" then elseif userDecision == "Accept" then
local unappliedPatch = self.__reconciler:applyPatch(catchUpPatch) self:__applyPatch(catchUpPatch)
if not PatchSet.isEmpty(unappliedPatch) then
Log.debug(
"Could not apply all changes requested by the Rojo server:\n{}",
PatchSet.humanSummary(self.__instanceMap, unappliedPatch)
)
end
return Promise.resolve() return Promise.resolve()
else else
return Promise.reject("Invalid user decision: " .. userDecision) return Promise.reject("Invalid user decision: " .. userDecision)
@@ -311,14 +506,7 @@ function ServeSession:__mainSyncLoop()
Log.trace("Serve session {} retrieved {} messages", tostring(self), #messages) Log.trace("Serve session {} retrieved {} messages", tostring(self), #messages)
for _, message in messages do for _, message in messages do
local unappliedPatch = self.__reconciler:applyPatch(message) self:__applyPatch(message)
if not PatchSet.isEmpty(unappliedPatch) then
Log.debug(
"Could not apply all changes requested by the Rojo server:\n{}",
PatchSet.humanSummary(self.__instanceMap, unappliedPatch)
)
end
end end
end) end)
:await() :await()

View File

@@ -14,6 +14,7 @@ local defaultSettings = {
twoWaySync = false, twoWaySync = false,
autoReconnect = false, autoReconnect = false,
showNotifications = true, showNotifications = true,
enableSyncFallback = true,
syncReminderMode = "Notify" :: "None" | "Notify" | "Fullscreen", syncReminderMode = "Notify" :: "None" | "Notify" | "Fullscreen",
syncReminderPolling = true, syncReminderPolling = true,
checkForUpdates = true, checkForUpdates = true,

View File

@@ -55,6 +55,16 @@ local ApiSubscribeResponse = t.interface({
messages = t.array(ApiSubscribeMessage), messages = t.array(ApiSubscribeMessage),
}) })
-- Response schema for the /api/serialize/ endpoint: a binary model,
-- delivered as a Luau buffer, containing the requested Instances.
local ApiSerializeResponse = t.interface({
sessionId = t.string,
modelContents = t.buffer,
})
-- Response schema for the /api/ref-patch/ endpoint: a patch containing
-- Ref property updates that point at recreated Instances.
local ApiRefPatchResponse = t.interface({
sessionId = t.string,
patch = ApiSubscribeMessage,
})
local ApiError = t.interface({ local ApiError = t.interface({
kind = t.union(t.literal("NotFound"), t.literal("BadRequest"), t.literal("InternalError")), kind = t.union(t.literal("NotFound"), t.literal("BadRequest"), t.literal("InternalError")),
details = t.string, details = t.string,
@@ -82,6 +92,8 @@ return strict("Types", {
ApiInstanceUpdate = ApiInstanceUpdate, ApiInstanceUpdate = ApiInstanceUpdate,
ApiInstanceMetadata = ApiInstanceMetadata, ApiInstanceMetadata = ApiInstanceMetadata,
ApiSubscribeMessage = ApiSubscribeMessage, ApiSubscribeMessage = ApiSubscribeMessage,
ApiSerializeResponse = ApiSerializeResponse,
ApiRefPatchResponse = ApiRefPatchResponse,
ApiValue = ApiValue, ApiValue = ApiValue,
RbxId = RbxId, RbxId = RbxId,

View File

@@ -0,0 +1,16 @@
---
source: tests/tests/serve.rs
expression: "read_response.intern_and_redact(&mut redactions, root_id)"
---
instances:
id-2:
Children: []
ClassName: Attachment
Id: id-2
Metadata:
ignoreUnknownInstances: true
Name: forced_parent
Parent: "00000000000000000000000000000000"
Properties: {}
messageCursor: 0
sessionId: id-1

View File

@@ -0,0 +1,13 @@
---
source: tests/tests/serve.rs
expression: redactions.redacted_yaml(&info)
---
expectedPlaceIds: ~
gameId: ~
placeId: ~
projectName: forced_parent
protocolVersion: 4
rootInstanceId: id-2
serverVersion: "[server-version]"
sessionId: id-1
unexpectedPlaceIds: ~

View File

@@ -0,0 +1,27 @@
---
source: tests/tests/serve.rs
expression: model
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">Folder</string>
</Properties>
<Item class="ObjectValue" referent="1">
<Properties>
<string name="Name">id-2</string>
<Ref name="Value">2</Ref>
</Properties>
<Item class="Part" referent="3">
<Properties>
<string name="Name">Part</string>
</Properties>
<Item class="Attachment" referent="2">
<Properties>
<string name="Name">forced_parent</string>
</Properties>
</Item>
</Item>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,58 @@
---
source: tests/tests/serve.rs
expression: "read_response.intern_and_redact(&mut redactions, root_id)"
---
instances:
id-2:
Children:
- id-3
ClassName: DataModel
Id: id-2
Metadata:
ignoreUnknownInstances: true
Name: meshpart
Parent: "00000000000000000000000000000000"
Properties: {}
id-3:
Children:
- id-4
- id-5
ClassName: Workspace
Id: id-3
Metadata:
ignoreUnknownInstances: true
Name: Workspace
Parent: id-2
Properties:
NeedsPivotMigration:
Bool: false
id-4:
Children: []
ClassName: ObjectValue
Id: id-4
Metadata:
ignoreUnknownInstances: true
Name: ObjectValue
Parent: id-3
Properties:
Attributes:
Attributes:
Rojo_Target_Value:
String: sword
Value:
Ref: id-5
id-5:
Children: []
ClassName: MeshPart
Id: id-5
Metadata:
ignoreUnknownInstances: true
Name: Sword
Parent: id-3
Properties:
MeshId:
ContentId: "rbxasset://fonts/sword.mesh"
TextureID:
ContentId: "rbxasset://textures/SwordTexture.png"
messageCursor: 0
sessionId: id-1

View File

@@ -0,0 +1,13 @@
---
source: tests/tests/serve.rs
expression: redactions.redacted_yaml(&info)
---
expectedPlaceIds: ~
gameId: ~
placeId: ~
projectName: meshpart
protocolVersion: 4
rootInstanceId: id-2
serverVersion: "[server-version]"
sessionId: id-1
unexpectedPlaceIds: ~

View File

@@ -0,0 +1,43 @@
---
source: tests/tests/serve.rs
expression: model
---
<roblox version="4">
<Item class="Folder" referent="0">
<Properties>
<string name="Name">Folder</string>
</Properties>
<Item class="ObjectValue" referent="1">
<Properties>
<string name="Name">id-5</string>
<BinaryString name="AttributesSerialize"></BinaryString>
<Ref name="Value">2</Ref>
</Properties>
<Item class="MeshPart" referent="2">
<Properties>
<string name="Name">Sword</string>
<Content name="MeshContent">
<uri>rbxasset://fonts/sword.mesh</uri>
</Content>
<Content name="TextureContent">
<uri>rbxasset://textures/SwordTexture.png</uri>
</Content>
</Properties>
</Item>
</Item>
<Item class="ObjectValue" referent="3">
<Properties>
<string name="Name">id-4</string>
<BinaryString name="AttributesSerialize"></BinaryString>
<Ref name="Value">4</Ref>
</Properties>
<Item class="ObjectValue" referent="4">
<Properties>
<string name="Name">ObjectValue</string>
<BinaryString name="AttributesSerialize">AQAAABEAAABSb2pvX1RhcmdldF9WYWx1ZQIFAAAAc3dvcmQ=</BinaryString>
<Ref name="Value">null</Ref>
</Properties>
</Item>
</Item>
</Item>
</roblox>

View File

@@ -0,0 +1,6 @@
{
"name": "forced_parent",
"tree": {
"$className": "Attachment"
}
}

View File

@@ -0,0 +1,22 @@
{
"name": "meshpart",
"tree": {
"$className": "DataModel",
"Workspace": {
"Sword": {
"$id": "sword",
"$className": "MeshPart",
"$properties": {
"MeshId": "rbxasset://fonts/sword.mesh",
"TextureID": "rbxasset://textures/SwordTexture.png"
}
},
"ObjectValue": {
"$className": "ObjectValue",
"$attributes": {
"Rojo_Target_Value": "sword"
}
}
}
}
}

View File

@@ -1,11 +1,20 @@
//! Defines Rojo's HTTP API, all under /api. These endpoints generally return //! Defines Rojo's HTTP API, all under /api. These endpoints generally return
//! JSON. //! JSON.
use std::{collections::HashMap, fs, path::PathBuf, str::FromStr, sync::Arc}; use std::{
collections::{HashMap, HashSet},
fs,
path::PathBuf,
str::FromStr,
sync::Arc,
};
use hyper::{body, Body, Method, Request, Response, StatusCode}; use hyper::{body, Body, Method, Request, Response, StatusCode};
use opener::OpenError; use opener::OpenError;
use rbx_dom_weak::types::Ref; use rbx_dom_weak::{
types::{Ref, Variant},
InstanceBuilder, UstrMap, WeakDom,
};
use crate::{ use crate::{
serve_session::ServeSession, serve_session::ServeSession,
@@ -18,6 +27,7 @@ use crate::{
}, },
util::{json, json_ok}, util::{json, json_ok},
}, },
web_api::{BufferEncode, InstanceUpdate, RefPatchResponse, SerializeResponse},
}; };
pub async fn call(serve_session: Arc<ServeSession>, request: Request<Body>) -> Response<Body> { pub async fn call(serve_session: Arc<ServeSession>, request: Request<Body>) -> Response<Body> {
@@ -31,10 +41,16 @@ pub async fn call(serve_session: Arc<ServeSession>, request: Request<Body>) -> R
(&Method::GET, path) if path.starts_with("/api/subscribe/") => { (&Method::GET, path) if path.starts_with("/api/subscribe/") => {
service.handle_api_subscribe(request).await service.handle_api_subscribe(request).await
} }
(&Method::GET, path) if path.starts_with("/api/serialize/") => {
service.handle_api_serialize(request).await
}
(&Method::GET, path) if path.starts_with("/api/ref-patch/") => {
service.handle_api_ref_patch(request).await
}
(&Method::POST, path) if path.starts_with("/api/open/") => { (&Method::POST, path) if path.starts_with("/api/open/") => {
service.handle_api_open(request).await service.handle_api_open(request).await
} }
(&Method::POST, "/api/write") => service.handle_api_write(request).await, (&Method::POST, "/api/write") => service.handle_api_write(request).await,
(_method, path) => json( (_method, path) => json(
@@ -201,6 +217,126 @@ impl ApiService {
}) })
} }
/// Accepts a list of IDs and returns them serialized as a binary model.
/// The model is sent in a schema that causes Roblox to deserialize it as
/// a Luau `buffer`.
///
/// The returned model is a folder that contains ObjectValues with names
/// that correspond to the requested Instances. These values have their
/// `Value` property set to point to the requested Instance.
/// Accepts a list of IDs and returns them serialized as a binary model.
/// The model is sent in a schema that causes Roblox to deserialize it as
/// a Luau `buffer`.
///
/// The returned model is a folder that contains ObjectValues with names
/// that correspond to the requested Instances. These values have their
/// `Value` property set to point to the requested Instance.
///
/// Responds with 400 Bad Request if the ID list is malformed or any ID is
/// not present in the tree.
async fn handle_api_serialize(&self, request: Request<Body>) -> Response<Body> {
    let argument = &request.uri().path()["/api/serialize/".len()..];
    let requested_ids: Result<Vec<Ref>, _> = argument.split(',').map(Ref::from_str).collect();

    let requested_ids = match requested_ids {
        Ok(ids) => ids,
        Err(_) => {
            return json(
                ErrorResponse::bad_request("Malformed ID list"),
                StatusCode::BAD_REQUEST,
            );
        }
    };

    let mut response_dom = WeakDom::new(InstanceBuilder::new("Folder"));
    let tree = self.serve_session.tree();
    for id in &requested_ids {
        if let Some(instance) = tree.get_instance(*id) {
            // Clone the Instance into the response DOM, wrapped in an
            // ObjectValue named after its ID so the plugin can map the
            // deserialized copies back to Rojo IDs.
            let clone = response_dom.insert(
                Ref::none(),
                InstanceBuilder::new(instance.class_name())
                    .with_name(instance.name())
                    .with_properties(instance.properties().clone()),
            );
            let object_value = response_dom.insert(
                response_dom.root_ref(),
                InstanceBuilder::new("ObjectValue")
                    .with_name(id.to_string())
                    .with_property("Value", clone),
            );
            // Some classes can only be serialized under a specific parent
            // class (e.g. Attachment under a BasePart); interpose one when
            // required.
            let mut child_ref = clone;
            if let Some(parent_class) = parent_requirements(&instance.class_name()) {
                child_ref =
                    response_dom.insert(object_value, InstanceBuilder::new(parent_class));
                response_dom.transfer_within(clone, child_ref);
            }
            response_dom.transfer_within(child_ref, object_value);
        } else {
            // BUGFIX: previously this response was constructed but never
            // returned, so unknown IDs were silently skipped and a partial
            // model was served with a 200 status.
            return json(
                ErrorResponse::bad_request(format!("provided id {id} is not in the tree")),
                StatusCode::BAD_REQUEST,
            );
        }
    }
    drop(tree);

    let mut source = Vec::new();
    rbx_binary::to_writer(&mut source, &response_dom, &[response_dom.root_ref()]).unwrap();

    json_ok(SerializeResponse {
        session_id: self.serve_session.session_id(),
        model_contents: BufferEncode::new(source),
    })
}
/// Returns a list of all referent properties that point towards the
/// provided IDs. Used because the plugin does not store a RojoTree,
/// and referent properties need to be updated after the serialize
/// endpoint is used.
async fn handle_api_ref_patch(self, request: Request<Body>) -> Response<Body> {
    let id_list = &request.uri().path()["/api/ref-patch/".len()..];
    let parsed: Result<HashSet<Ref>, _> = id_list.split(',').map(Ref::from_str).collect();
    let target_ids = match parsed {
        Ok(ids) => ids,
        Err(_) => {
            return json(
                ErrorResponse::bad_request("Malformed ID list"),
                StatusCode::BAD_REQUEST,
            );
        }
    };

    let tree = self.serve_session.tree();
    // One update per instance, accumulating every matching Ref property.
    let mut updates: HashMap<Ref, InstanceUpdate> = HashMap::new();
    for descendant in tree.descendants(tree.get_root_id()) {
        for (prop_name, prop_value) in descendant.properties() {
            let Variant::Ref(pointed_at) = prop_value else {
                continue;
            };
            let Some(target) = target_ids.get(pointed_at) else {
                continue;
            };
            let descendant_id = descendant.id();
            updates
                .entry(descendant_id)
                .or_insert_with(|| InstanceUpdate {
                    id: descendant_id,
                    changed_class_name: None,
                    changed_name: None,
                    changed_metadata: None,
                    changed_properties: UstrMap::default(),
                })
                .changed_properties
                .insert(*prop_name, Some(Variant::Ref(*target)));
        }
    }

    json_ok(RefPatchResponse {
        session_id: self.serve_session.session_id(),
        patch: SubscribeMessage {
            added: HashMap::new(),
            removed: Vec::new(),
            updated: updates.into_values().collect(),
        },
    })
}
/// Open a script with the given ID in the user's default text editor. /// Open a script with the given ID in the user's default text editor.
async fn handle_api_open(&self, request: Request<Body>) -> Response<Body> { async fn handle_api_open(&self, request: Request<Body>) -> Response<Body> {
let argument = &request.uri().path()["/api/open/".len()..]; let argument = &request.uri().path()["/api/open/".len()..];
@@ -306,3 +442,17 @@ fn pick_script_path(instance: InstanceWithMeta<'_>) -> Option<PathBuf> {
}) })
.map(|path| path.to_owned()) .map(|path| path.to_owned())
} }
/// Certain Instances MUST be a child of specific classes. This function
/// tracks that information for the Serialize endpoint.
///
/// If a parent requirement exists for `class`, the required parent class
/// name is returned. Otherwise returns `None`.
///
/// The return type is `&'static str` (the values are string literals), so
/// callers may hold the result independently of the input's lifetime.
fn parent_requirements(class: &str) -> Option<&'static str> {
    Some(match class {
        "Attachment" | "Bone" => "Part",
        "Animator" => "Humanoid",
        "BaseWrap" | "WrapLayer" | "WrapTarget" | "WrapDeformer" => "MeshPart",
        _ => return None,
    })
}

View File

@@ -208,6 +208,44 @@ pub struct OpenResponse {
pub session_id: SessionId, pub session_id: SessionId,
} }
/// Response body for the serialize endpoint: the requested Instances packed
/// into a binary model, wrapped so Roblox decodes it as a Luau `buffer`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SerializeResponse {
// ID of the serve session that produced this response.
pub session_id: SessionId,
// The serialized binary model, base64-encoded inside a buffer schema.
pub model_contents: BufferEncode,
}
/// Using this struct we can force Roblox to JSONDecode this as a buffer.
/// This is what Roblox's serde APIs use, so it saves a step in the plugin.
#[derive(Debug, Serialize, Deserialize)]
pub struct BufferEncode {
    m: (),
    t: Cow<'static, str>,
    base64: String,
}

impl BufferEncode {
    /// Wraps raw bytes in the buffer schema, base64-encoding the payload.
    pub fn new(content: Vec<u8>) -> Self {
        Self {
            m: (),
            t: Cow::Borrowed("buffer"),
            base64: data_encoding::BASE64.encode(&content),
        }
    }

    /// Returns the base64-encoded model contents.
    pub fn model(&self) -> &str {
        self.base64.as_str()
    }
}
/// Response body for the ref-patch endpoint: a patch the plugin applies to
/// restore referent properties after the serialize endpoint recreates
/// Instances.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RefPatchResponse<'a> {
// ID of the serve session that produced this response.
pub session_id: SessionId,
// Contains only `updated` entries; `added` and `removed` are empty.
pub patch: SubscribeMessage<'a>,
}
/// General response type returned from all Rojo routes /// General response type returned from all Rojo routes
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]

View File

@@ -1,4 +1,5 @@
use std::{ use std::{
fmt::Write as _,
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
process::Command, process::Command,
@@ -11,7 +12,7 @@ use rbx_dom_weak::types::Ref;
use tempfile::{tempdir, TempDir}; use tempfile::{tempdir, TempDir};
use librojo::web_api::{ReadResponse, ServerInfoResponse, SubscribeResponse}; use librojo::web_api::{ReadResponse, SerializeResponse, ServerInfoResponse, SubscribeResponse};
use rojo_insta_ext::RedactionMap; use rojo_insta_ext::RedactionMap;
use crate::rojo_test::io_util::{ use crate::rojo_test::io_util::{
@@ -174,6 +175,18 @@ impl TestServeSession {
reqwest::blocking::get(url)?.json() reqwest::blocking::get(url)?.json()
} }
/// Requests `/api/serialize` for the given IDs and decodes the JSON
/// response body into a `SerializeResponse`.
pub fn get_api_serialize(&self, ids: &[Ref]) -> Result<SerializeResponse, reqwest::Error> {
    // Rough upper bound per ID (display form plus separator) to avoid
    // repeated reallocation while joining.
    let mut joined = String::with_capacity(ids.len() * 33);
    for (index, id) in ids.iter().enumerate() {
        if index > 0 {
            joined.push(',');
        }
        write!(joined, "{id}").unwrap();
    }
    let url = format!("http://localhost:{}/api/serialize/{}", self.port, joined);
    reqwest::blocking::get(url)?.json()
}
} }
/// Probably-okay way to generate random enough port numbers for running the /// Probably-okay way to generate random enough port numbers for running the
@@ -187,3 +200,27 @@ fn get_port_number() -> usize {
NEXT_PORT_NUMBER.fetch_add(1, Ordering::SeqCst) NEXT_PORT_NUMBER.fetch_add(1, Ordering::SeqCst)
} }
/// Takes a SerializeResponse and creates an XML model out of the response.
///
/// Since the provided structure intentionally includes unredacted referents,
/// some post-processing is done to ensure they don't show up in the model.
pub fn serialize_to_xml_model(response: &SerializeResponse, redactions: &RedactionMap) -> String {
    let decoded = data_encoding::BASE64
        .decode(response.model_contents.model().as_bytes())
        .unwrap();
    let mut dom = rbx_binary::from_reader(decoded.as_slice()).unwrap();

    // Collect referents first so the mutable borrows below don't conflict
    // with the traversal. (A `descendants_mut` iterator would make this a
    // single pass.)
    let referents: Vec<Ref> = dom.descendants().map(|inst| inst.referent()).collect();
    for referent in referents {
        let instance = dom.get_by_ref_mut(referent).unwrap();
        if let Some(id) = redactions.get_id_for_value(&instance.name) {
            instance.name = format!("id-{id}");
        }
    }

    let mut output = Vec::new();
    rbx_xml::to_writer_default(&mut output, &dom, dom.root().children()).unwrap();
    String::from_utf8(output).expect("rbx_xml should never produce invalid utf-8")
}

View File

@@ -1,9 +1,12 @@
use std::fs; use std::fs;
use insta::{assert_yaml_snapshot, with_settings}; use insta::{assert_snapshot, assert_yaml_snapshot, with_settings};
use tempfile::tempdir; use tempfile::tempdir;
use crate::rojo_test::{internable::InternAndRedact, serve_util::run_serve_test}; use crate::rojo_test::{
internable::InternAndRedact,
serve_util::{run_serve_test, serialize_to_xml_model},
};
#[test] #[test]
fn empty() { fn empty() {
@@ -591,3 +594,66 @@ fn model_pivot_migration() {
); );
}); });
} }
#[test]
fn meshpart_with_id() {
    run_serve_test("meshpart_with_id", |session, mut redactions| {
        let info = session.get_api_rojo().unwrap();
        let root_id = info.root_instance_id;

        assert_yaml_snapshot!("meshpart_with_id_info", redactions.redacted_yaml(&info));

        let read_response = session.get_api_read(root_id).unwrap();
        assert_yaml_snapshot!(
            "meshpart_with_id_all",
            read_response.intern_and_redact(&mut redactions, root_id)
        );

        // Look up the IDs of the two Instances we want serialized.
        let find_id = |class: &str| {
            read_response
                .instances
                .iter()
                .find(|(_, inst)| inst.class_name == class)
                .map(|(id, _)| *id)
                .unwrap()
        };
        let meshpart = find_id("MeshPart");
        let objectvalue = find_id("ObjectValue");

        let serialize_response = session
            .get_api_serialize(&[meshpart, objectvalue])
            .unwrap();

        // We don't snapshot the raw SerializeResponse because the model
        // embeds DOM Refs as names, which differ on every run. The session
        // ID is stable, so assert on that directly.
        assert_eq!(serialize_response.session_id, info.session_id);

        let model = serialize_to_xml_model(&serialize_response, &redactions);
        assert_snapshot!("meshpart_with_id_serialize_model", model);
    });
}
#[test]
fn forced_parent() {
    run_serve_test("forced_parent", |session, mut redactions| {
        let rojo_info = session.get_api_rojo().unwrap();
        let root_id = rojo_info.root_instance_id;

        assert_yaml_snapshot!("forced_parent_info", redactions.redacted_yaml(&rojo_info));

        let read_response = session.get_api_read(root_id).unwrap();
        assert_yaml_snapshot!(
            "forced_parent_all",
            read_response.intern_and_redact(&mut redactions, root_id)
        );

        // Serialize the whole tree starting from the root instance.
        let serialize_response = session.get_api_serialize(&[root_id]).unwrap();
        assert_eq!(serialize_response.session_id, rojo_info.session_id);

        let model = serialize_to_xml_model(&serialize_response, &redactions);
        assert_snapshot!("forced_parent_serialize_model", model);
    });
}