rename DeleteSet=>IdSet and add utilities and perf improvements

This commit is contained in:
Kevin Jahns
2025-04-08 20:50:20 +02:00
parent 632ec45020
commit 8686113ea2
25 changed files with 780 additions and 786 deletions

View File

@@ -17,7 +17,7 @@ export default [{
output: {
dir: 'dist',
format: 'cjs',
entryFileNames : '[name].cjs',
entryFileNames: '[name].cjs',
sourcemap: true
},
plugins: [
@@ -34,7 +34,7 @@ export default [{
output: {
dir: 'dist',
format: 'esm',
entryFileNames : '[name].mjs',
entryFileNames: '[name].mjs',
sourcemap: true
},
plugins: [

View File

@@ -42,8 +42,6 @@ export {
getState,
Snapshot,
createSnapshot,
createDeleteSet,
createDeleteSetFromStructStore,
cleanupYTextFormatting,
snapshot,
emptySnapshot,
@@ -56,7 +54,6 @@ export {
typeMapGetSnapshot,
typeMapGetAllSnapshot,
createDocFromSnapshot,
iterateDeletedStructs,
applyUpdate,
applyUpdateV2,
readUpdate,
@@ -75,7 +72,6 @@ export {
decodeUpdate,
decodeUpdateV2,
relativePositionToJSON,
isDeleted,
isParentOf,
equalSnapshots,
PermanentUserData, // @TODO experimental
@@ -101,9 +97,10 @@ export {
UpdateEncoderV2,
UpdateDecoderV1,
UpdateDecoderV2,
equalDeleteSets,
mergeDeleteSets,
snapshotContainsUpdate
snapshotContainsUpdate,
// idset
equalIdSets,
createDeleteSetFromStructStore
} from './internals.js'
const glo = /** @type {any} */ (typeof globalThis !== 'undefined'

View File

@@ -1,5 +1,5 @@
export * from './utils/AbstractConnector.js'
export * from './utils/DeleteSet.js'
export * from './utils/IdSet.js'
export * from './utils/Doc.js'
export * from './utils/UpdateDecoder.js'
export * from './utils/UpdateEncoder.js'

View File

@@ -1,5 +1,5 @@
import {
addToDeleteSet,
addToIdSet,
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
} from '../internals.js'
@@ -63,7 +63,7 @@ export class ContentDeleted {
* @param {Item} item
*/
integrate (transaction, item) {
addToDeleteSet(transaction.deleteSet, item.id.client, item.id.clock, this.len)
addToIdSet(transaction.deleteSet, item.id.client, item.id.clock, this.len)
item.markDeleted()
}

View File

@@ -6,8 +6,7 @@ import {
readYXmlFragment,
readYXmlHook,
readYXmlText,
isDeleted,
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, AbstractType // eslint-disable-line
} from '../internals.js'
import * as error from 'lib0/error'
@@ -77,18 +76,18 @@ export class ContentType {
}
/**
* @param {number} offset
* @param {number} _offset
* @return {ContentType}
*/
splice (offset) {
splice (_offset) {
throw error.methodUnimplemented()
}
/**
* @param {ContentType} right
* @param {ContentType} _right
* @return {boolean}
*/
mergeWith (right) {
mergeWith (_right) {
return false
}
@@ -108,7 +107,7 @@ export class ContentType {
while (item !== null) {
if (!item.deleted) {
item.delete(transaction)
} else if (!isDeleted(transaction.insertSet, item.id)) {
} else if (!transaction.insertSet.has(item.id)) {
// This will be gc'd later and we want to merge it if possible
// We try to merge all deleted items after each transaction,
// but we have no knowledge about that this needs to be merged
@@ -120,7 +119,7 @@ export class ContentType {
this.type._map.forEach(item => {
if (!item.deleted) {
item.delete(transaction)
} else if (!isDeleted(transaction.insertSet, item.id)) {
} else if (!transaction.insertSet.has(item.id)) {
// same as above
transaction._mergeStructs.push(item)
}

View File

@@ -1,8 +1,8 @@
import {
AbstractStruct,
addStruct,
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID, // eslint-disable-line
addItemToInsertSet
addStructToIdSet,
UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction // eslint-disable-line
} from '../internals.js'
export const structGCRefNumber = 0
@@ -38,7 +38,7 @@ export class GC extends AbstractStruct {
this.id.clock += offset
this.length -= offset
}
addItemToInsertSet(transaction, this)
addStructToIdSet(transaction.insertSet, this)
addStruct(transaction.doc.store, this)
}

View File

@@ -4,7 +4,7 @@ import {
AbstractStruct,
replaceStruct,
addStruct,
addToDeleteSet,
addToIdSet,
findRootTypeKey,
compareIDs,
getItem,
@@ -21,9 +21,8 @@ import {
readContentFormat,
readContentType,
addChangedTypeToTransaction,
isDeleted,
StackItem, DeleteSet, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction, // eslint-disable-line
addItemToInsertSet
addStructToIdSet,
IdSet, StackItem, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction, // eslint-disable-line
} from '../internals.js'
import * as error from 'lib0/error'
@@ -125,7 +124,7 @@ export const splitItem = (transaction, leftItem, diff) => {
* @param {Array<StackItem>} stack
* @param {ID} id
*/
const isDeletedByUndoStack = (stack, id) => array.some(stack, /** @param {StackItem} s */ s => isDeleted(s.deletions, id))
const isDeletedByUndoStack = (stack, id) => array.some(stack, /** @param {StackItem} s */ s => s.deletions.has(id))
/**
* Redoes the effect of this operation.
@@ -133,7 +132,7 @@ const isDeletedByUndoStack = (stack, id) => array.some(stack, /** @param {StackI
* @param {Transaction} transaction The Yjs instance.
* @param {Item} item
* @param {Set<Item>} redoitems
* @param {DeleteSet} itemsToDelete
* @param {IdSet} itemsToDelete
* @param {boolean} ignoreRemoteMapChanges
* @param {import('../utils/UndoManager.js').UndoManager} um
*
@@ -211,7 +210,7 @@ export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemo
left = item
// Iterate right while right is in itemsToDelete
// If it is intended to delete right while item is redone, we can expect that item should replace right.
while (left !== null && left.right !== null && (left.right.redone || isDeleted(itemsToDelete, left.right.id) || isDeletedByUndoStack(um.undoStack, left.right.id) || isDeletedByUndoStack(um.redoStack, left.right.id))) {
while (left !== null && left.right !== null && (left.right.redone || itemsToDelete.has(left.right.id) || isDeletedByUndoStack(um.undoStack, left.right.id) || isDeletedByUndoStack(um.redoStack, left.right.id))) {
left = left.right
// follow redone
while (left.redone) left = getItemCleanStart(transaction, left.redone)
@@ -515,7 +514,7 @@ export class Item extends AbstractStruct {
if (this.parentSub === null && this.countable && !this.deleted) {
/** @type {AbstractType<any>} */ (this.parent)._length += this.length
}
addItemToInsertSet(transaction, this)
addStructToIdSet(transaction.insertSet, this)
addStruct(transaction.doc.store, this)
this.content.integrate(transaction, this)
// add parent to transaction.changed
@@ -619,7 +618,7 @@ export class Item extends AbstractStruct {
parent._length -= this.length
}
this.markDeleted()
addToDeleteSet(transaction.deleteSet, this.id.client, this.id.clock, this.length)
addToIdSet(transaction.deleteSet, this.id.client, this.id.clock, this.length)
addChangedTypeToTransaction(transaction, parent, this.parentSub)
this.content.delete(transaction)
}

View File

@@ -17,8 +17,7 @@ import {
ContentFormat,
ContentString,
splitSnapshotAffectedStructs,
iterateDeletedStructs,
iterateStructs,
iterateStructsByIdSet,
findMarker,
typeMapDelete,
typeMapSet,
@@ -493,7 +492,7 @@ export const cleanupYTextAfterTransaction = transaction => {
const needFullCleanup = new Set()
// check if another formatting item was inserted
const doc = transaction.doc
iterateDeletedStructs(transaction, transaction.insertSet, (item) => {
iterateStructsByIdSet(transaction, transaction.insertSet, (item) => {
if (
!item.deleted && /** @type {Item} */ (item).content.constructor === ContentFormat && item.constructor !== GC
) {
@@ -502,7 +501,7 @@ export const cleanupYTextAfterTransaction = transaction => {
})
// cleanup in a new transaction
transact(doc, (t) => {
iterateDeletedStructs(transaction, transaction.deleteSet, item => {
iterateStructsByIdSet(transaction, transaction.deleteSet, item => {
if (item instanceof GC || !(/** @type {YText} */ (item.parent)._hasFormatting) || needFullCleanup.has(/** @type {YText} */ (item.parent))) {
return
}
@@ -1082,7 +1081,6 @@ export class YText extends AbstractType {
return d.done()
}
/**
* Returns the Delta representation of this YText type.
*

View File

@@ -1,7 +1,7 @@
export class AttributionManager {
/**
*
*
*/
constructor () {
}

View File

@@ -1,420 +0,0 @@
import {
findIndexSS,
getState,
splitItem,
iterateStructs,
UpdateEncoderV2,
DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
} from '../internals.js'
import * as array from 'lib0/array'
import * as math from 'lib0/math'
import * as map from 'lib0/map'
import * as encoding from 'lib0/encoding'
import * as decoding from 'lib0/decoding'
export class DeleteItem {
  /**
   * An immutable range of deleted ids for a single client: [clock, clock + len).
   *
   * @param {number} clock first clock of the deleted range
   * @param {number} len number of ids covered by the range
   */
  constructor (clock, len) {
    /**
     * @readonly
     * @type {number}
     */
    this.clock = clock
    /**
     * @readonly
     * @type {number}
     */
    this.len = len
  }
}
/**
* We no longer maintain a DeleteStore. DeleteSet is a temporary object that is created when needed.
* - When created in a transaction, it must only be accessed after sorting, and merging
- This DeleteSet is sent to other clients
* - We do not create a DeleteSet when we send a sync message. The DeleteSet message is created directly from StructStore
* - We read a DeleteSet as part of a sync/update message. In this case the DeleteSet is already sorted and merged.
*/
export class DeleteSet {
  /**
   * A temporary collection of deleted id ranges, keyed by client id.
   */
  constructor () {
    /**
     * Maps a client id to its list of delete items.
     * @type {Map<number,Array<DeleteItem>>}
     */
    this.clients = new Map()
  }
}
/**
 * Iterate over all structs that the DeleteSet gc's.
 *
 * For each client in `ds`, looks up that client's struct list in the
 * transaction's struct store and calls `f` on every struct that falls into
 * one of the client's delete ranges (delegated to `iterateStructs`).
 *
 * @param {Transaction} transaction
 * @param {DeleteSet} ds
 * @param {function(GC|Item):void} f callback invoked per matched struct
 *
 * @function
 */
export const iterateDeletedStructs = (transaction, ds, f) =>
  ds.clients.forEach((deletes, clientid) => {
    // NOTE(review): assumes every client mentioned in `ds` also exists in the
    // struct store (the cast hides a possible undefined) — confirm at call sites
    const structs = /** @type {Array<GC|Item>} */ (transaction.doc.store.clients.get(clientid))
    for (let i = 0; i < deletes.length; i++) {
      const del = deletes[i]
      iterateStructs(transaction, structs, del.clock, del.len, f)
    }
  })
/**
 * Binary search for the DeleteItem that contains `clock`.
 *
 * @param {Array<DeleteItem>} dis sorted, non-overlapping delete items
 * @param {number} clock
 * @return {number|null} index of the item covering `clock`, or null if none does
 *
 * @private
 * @function
 */
export const findIndexDS = (dis, clock) => {
  let lo = 0
  let hi = dis.length - 1
  while (lo <= hi) {
    const mid = (lo + hi) >>> 1
    const item = dis[mid]
    if (clock < item.clock) {
      // clock lies before this item — search the left half
      hi = mid - 1
    } else if (clock < item.clock + item.len) {
      // item.clock <= clock < item.clock + item.len — found it
      return mid
    } else {
      lo = mid + 1
    }
  }
  return null
}
/**
 * Whether `id` is covered by one of the delete ranges in `ds`.
 *
 * @param {DeleteSet} ds
 * @param {ID} id
 * @return {boolean}
 *
 * @private
 * @function
 */
export const isDeleted = (ds, id) => {
  const clientDeletes = ds.clients.get(id.client)
  if (clientDeletes === undefined) {
    return false
  }
  return findIndexDS(clientDeletes, id.clock) !== null
}
/**
 * Normalize `ds` in place: sort each client's delete items by clock and merge
 * overlapping/adjacent items. Clients whose items collapse to nothing are
 * removed from the map. Must run before `ds` is read (e.g. by findIndexDS).
 *
 * @param {DeleteSet} ds
 *
 * @private
 * @function
 */
export const sortAndMergeDeleteSet = ds => {
  ds.clients.forEach((dels, client) => {
    dels.sort((a, b) => a.clock - b.clock)
    // merge items without filtering or splicing the array
    // i is the current pointer
    // j refers to the current insert position for the pointed item
    // try to merge dels[i] into dels[j-1] or set dels[j]=dels[i]
    let i, j
    for (i = 1, j = 1; i < dels.length; i++) {
      const left = dels[j - 1]
      const right = dels[i]
      if (left.clock + left.len >= right.clock) {
        // overlapping or adjacent: extend `left` only if `right` reaches further
        const r = right.clock + right.len - left.clock
        if (left.len < r) {
          dels[j - 1] = new DeleteItem(left.clock, r)
        }
      } else if (left.len === 0) {
        // a zero-length placeholder is overwritten rather than kept
        dels[j - 1] = right
      } else {
        if (j < i) {
          dels[j] = right
        }
        j++
      }
    }
    // truncate to the merged prefix; drop a trailing zero-length item
    if (dels[j - 1].len === 0) {
      dels.length = j - 1
    } else {
      dels.length = j
    }
    if (dels.length === 0) {
      ds.clients.delete(client)
    }
  })
}
/**
 * Merge several DeleteSets into one fresh, sorted & merged DeleteSet.
 * The input sets are not modified.
 *
 * @param {Array<DeleteSet>} dss
 * @return {DeleteSet} A fresh DeleteSet
 */
export const mergeDeleteSets = dss => {
  const merged = new DeleteSet()
  for (let dssI = 0; dssI < dss.length; dssI++) {
    dss[dssI].clients.forEach((delsLeft, client) => {
      if (!merged.clients.has(client)) {
        // Write all missing keys from current ds and all following.
        // If merged already contains `client` current ds has already been added.
        /**
         * @type {Array<DeleteItem>}
         */
        const dels = delsLeft.slice()
        for (let i = dssI + 1; i < dss.length; i++) {
          array.appendTo(dels, dss[i].clients.get(client) || [])
        }
        merged.clients.set(client, dels)
      }
    })
  }
  // normalize: concatenated lists may be unsorted and may overlap
  sortAndMergeDeleteSet(merged)
  return merged
}
/**
 * Remove all ranges from `exclude` from `ds`. The result will contain all ranges from `ds` that are not
 * in `exclude`.
 *
 * Two-pointer sweep over both clients' range lists; assumes both inputs are
 * sorted & merged (see sortAndMergeDeleteSet). Neither input is modified.
 *
 * @param {DeleteSet} ds
 * @param {DeleteSet} exclude
 * @return {DeleteSet}
 */
export const diffDeleteSet = (ds, exclude) => {
  const res = new DeleteSet()
  ds.clients.forEach((ranges, client) => {
    /**
     * @type {Array<DeleteItem>}
     */
    const resRanges = []
    const excludedRanges = exclude.clients.get(client) ?? []
    let i = 0, j = 0
    // currRange is the (possibly already trimmed) range currently being compared
    let currRange = ranges[0]
    while (i < ranges.length && j < excludedRanges.length) {
      const e = excludedRanges[j]
      if (currRange.clock + currRange.len <= e.clock) { // no overlapping, use next range item
        if (currRange.len > 0) resRanges.push(currRange)
        currRange = ranges[++i]
      } else if (e.clock + e.len <= currRange.clock) { // no overlapping, use next excluded item
        j++
      } else if (e.clock <= currRange.clock) { // exclude laps into range (we already know that the ranges somehow collide)
        const newClock = e.clock + e.len
        const newLen = currRange.clock + currRange.len - newClock
        if (newLen > 0) {
          currRange = new DeleteItem(newClock, newLen)
          j++
        } else {
          // this item is completely overwritten. len=0. We can jump to the next range
          currRange = ranges[++i]
        }
      } else { // currRange.clock < e.clock -- range laps into exclude => adjust len
        // beginning can't be empty, add it to the result
        const nextLen = e.clock - currRange.clock
        resRanges.push(new DeleteItem(currRange.clock, nextLen))
        // retain the remaining length after exclude in currRange
        currRange = new DeleteItem(currRange.clock + e.len + nextLen, math.max(currRange.len - e.len - nextLen, 0))
        if (currRange.len === 0) currRange = ranges[++i]
        else j++
      }
    }
    // flush the remainder of the current range and all untouched trailing ranges
    if (currRange != null) {
      resRanges.push(currRange)
    }
    i++
    while (i < ranges.length) {
      resRanges.push(ranges[i++])
    }
    if (resRanges.length > 0) res.clients.set(client, resRanges)
  })
  return res
}
/**
 * Record the deleted range [clock, clock + length) for `client` in `ds`.
 * Items are appended unsorted/unmerged — run sortAndMergeDeleteSet before
 * reading the set.
 *
 * @param {DeleteSet} ds
 * @param {number} client
 * @param {number} clock
 * @param {number} length
 *
 * @private
 * @function
 */
export const addToDeleteSet = (ds, client, clock, length) => {
  let dels = ds.clients.get(client)
  if (dels === undefined) {
    dels = []
    ds.clients.set(client, dels)
  }
  dels.push(new DeleteItem(clock, length))
}

/**
 * @return {DeleteSet} a fresh, empty DeleteSet
 */
export const createDeleteSet = () => new DeleteSet()
/**
 * Build a DeleteSet covering every deleted struct in the struct store.
 *
 * @param {StructStore} ss
 * @return {DeleteSet} Merged and sorted DeleteSet
 *
 * @private
 * @function
 */
export const createDeleteSetFromStructStore = ss => {
  const ds = createDeleteSet()
  ss.clients.forEach((structs, client) => {
    /**
     * @type {Array<DeleteItem>}
     */
    const dsitems = []
    for (let i = 0; i < structs.length; i++) {
      const struct = structs[i]
      if (struct.deleted) {
        const clock = struct.id.clock
        let len = struct.length
        // coalesce consecutive deleted structs into a single range
        // (structs are stored in clock order, so this yields a sorted, merged set)
        if (i + 1 < structs.length) {
          for (let next = structs[i + 1]; i + 1 < structs.length && next.deleted; next = structs[++i + 1]) {
            len += next.length
          }
        }
        dsitems.push(new DeleteItem(clock, len))
      }
    }
    if (dsitems.length > 0) {
      ds.clients.set(client, dsitems)
    }
  })
  return ds
}
/**
 * Encode `ds` with the given DS encoder: a client count, then per client
 * its id followed by its delete items (clock/len pairs).
 *
 * @param {DSEncoderV1 | DSEncoderV2} encoder
 * @param {DeleteSet} ds
 *
 * @private
 * @function
 */
export const writeDeleteSet = (encoder, ds) => {
  encoding.writeVarUint(encoder.restEncoder, ds.clients.size)
  // Ensure that the delete set is written in a deterministic order
  // (clients sorted by descending id)
  array.from(ds.clients.entries())
    .sort((a, b) => b[0] - a[0])
    .forEach(([client, dsitems]) => {
      encoder.resetDsCurVal()
      encoding.writeVarUint(encoder.restEncoder, client)
      const len = dsitems.length
      encoding.writeVarUint(encoder.restEncoder, len)
      for (let i = 0; i < len; i++) {
        const item = dsitems[i]
        encoder.writeDsClock(item.clock)
        encoder.writeDsLen(item.len)
      }
    })
}
/**
 * Decode a DeleteSet (inverse of writeDeleteSet).
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @return {DeleteSet}
 *
 * @private
 * @function
 */
export const readDeleteSet = decoder => {
  const ds = new DeleteSet()
  const numClients = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numClients; i++) {
    decoder.resetDsCurVal()
    const client = decoding.readVarUint(decoder.restDecoder)
    const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
    if (numberOfDeletes > 0) {
      // create the client entry lazily so empty clients never appear in the map
      const dsField = map.setIfUndefined(ds.clients, client, () => /** @type {Array<DeleteItem>} */ ([]))
      for (let i = 0; i < numberOfDeletes; i++) {
        dsField.push(new DeleteItem(decoder.readDsClock(), decoder.readDsLen()))
      }
    }
  }
  return ds
}
/**
* @todo YDecoder also contains references to String and other Decoders. Would make sense to exchange YDecoder.toUint8Array for YDecoder.DsToUint8Array()..
*/
/**
 * Read a DeleteSet from `decoder` and apply as much of it as possible to the
 * store: structs already present are split at range boundaries and deleted.
 * Ranges that reach beyond the client's known state are collected and
 * re-encoded so the caller can retry them later.
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @param {Transaction} transaction
 * @param {StructStore} store
 * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
 *
 * @private
 * @function
 */
export const readAndApplyDeleteSet = (decoder, transaction, store) => {
  const unappliedDS = new DeleteSet()
  const numClients = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numClients; i++) {
    decoder.resetDsCurVal()
    const client = decoding.readVarUint(decoder.restDecoder)
    const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
    const structs = store.clients.get(client) || []
    const state = getState(store, client)
    for (let i = 0; i < numberOfDeletes; i++) {
      const clock = decoder.readDsClock()
      const clockEnd = clock + decoder.readDsLen()
      if (clock < state) {
        if (state < clockEnd) {
          // the range extends past what we know — keep the unknown tail for later
          addToDeleteSet(unappliedDS, client, state, clockEnd - state)
        }
        let index = findIndexSS(structs, clock)
        /**
         * We can ignore the case of GC and Delete structs, because we are going to skip them
         * @type {Item}
         */
        // @ts-ignore
        let struct = structs[index]
        // split the first item if necessary
        if (!struct.deleted && struct.id.clock < clock) {
          structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock))
          index++ // increase we now want to use the next struct
        }
        // delete every not-yet-deleted struct within [clock, clockEnd),
        // splitting the last one if the range ends inside it
        while (index < structs.length) {
          // @ts-ignore
          struct = structs[index++]
          if (struct.id.clock < clockEnd) {
            if (!struct.deleted) {
              if (clockEnd < struct.id.clock + struct.length) {
                structs.splice(index, 0, splitItem(transaction, struct, clockEnd - struct.id.clock))
              }
              struct.delete(transaction)
            }
          } else {
            break
          }
        }
      } else {
        // nothing of this range is known yet — keep it entirely
        addToDeleteSet(unappliedDS, client, clock, clockEnd - clock)
      }
    }
  }
  if (unappliedDS.clients.size > 0) {
    const ds = new UpdateEncoderV2()
    encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs
    writeDeleteSet(ds, unappliedDS)
    return ds.toUint8Array()
  }
  return null
}
/**
 * Structural equality of two DeleteSets. Ranges are compared index-by-index,
 * so both sets are expected to be normalized (sorted & merged).
 *
 * @param {DeleteSet} ds1
 * @param {DeleteSet} ds2
 */
export const equalDeleteSets = (ds1, ds2) => {
  if (ds1.clients.size !== ds2.clients.size) return false
  for (const [client, items1] of ds1.clients.entries()) {
    const items2 = /** @type {Array<import('../internals.js').DeleteItem>} */ (ds2.clients.get(client))
    if (items2 === undefined || items1.length !== items2.length) return false
    for (let idx = 0; idx < items1.length; idx++) {
      const a = items1[idx]
      const b = items2[idx]
      if (a.clock !== b.clock || a.len !== b.len) return false
    }
  }
  return true
}

View File

@@ -27,6 +27,7 @@ export class InsertOp {
this.attributes = attributes
this.attribution = attribution
}
toJSON () {
return object.assign({ insert: this.insert }, this.attributes ? { attributes: this.attributes } : ({}), this.attribution ? { attribution: this.attribution } : ({}))
}
@@ -39,6 +40,7 @@ export class DeleteOp {
constructor (len) {
this.delete = len
}
toJSON () {
return { delete: this.delete }
}
@@ -55,6 +57,7 @@ export class RetainOp {
this.attributes = attributes
this.attribution = attribution
}
toJSON () {
return object.assign({ retain: this.retain }, this.attributes ? { attributes: this.attributes } : {}, this.attribution ? { attribution: this.attribution } : {})
}
@@ -67,6 +70,7 @@ export class Delta {
*/
this.ops = []
}
toJSON () {
return { ops: this.ops.map(o => o.toJSON()) }
}

460
src/utils/IdSet.js Normal file
View File

@@ -0,0 +1,460 @@
import {
findIndexSS,
getState,
splitItem,
iterateStructs,
UpdateEncoderV2,
AbstractStruct, DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
} from '../internals.js'
import * as array from 'lib0/array'
import * as math from 'lib0/math'
import * as encoding from 'lib0/encoding'
import * as decoding from 'lib0/decoding'
export class IdRange {
  /**
   * An immutable id range for a single client: [clock, clock + len).
   *
   * @param {number} clock first clock of the range
   * @param {number} len number of ids covered by the range
   */
  constructor (clock, len) {
    /**
     * @readonly
     * @type {number}
     */
    this.clock = clock
    /**
     * @readonly
     * @type {number}
     */
    this.len = len
  }
}
class IdRanges {
  /**
   * Holds one client's id ranges. Ranges stay unsorted/unmerged until
   * `getIds` is called, which normalizes lazily (perf: many writes, few reads).
   *
   * @param {Array<IdRange>} ids
   */
  constructor (ids) {
    // whether `_ids` is currently known to be sorted & merged
    this.sorted = false
    /**
     * @private
     * @type {Array<IdRange>}
     */
    this._ids = ids
  }

  /**
   * Add a range. If it directly extends the most recently added range the two
   * are merged in place (fast path); otherwise it is appended and the list is
   * marked unsorted.
   *
   * @param {number} clock
   * @param {number} length
   */
  add (clock, length) {
    const last = this._ids[this._ids.length - 1]
    if (last === undefined) {
      // robustness fix: an empty ranges array previously crashed here
      this._ids.push(new IdRange(clock, length))
    } else if (last.clock + last.len === clock) {
      // IdRange is immutable — replace rather than mutate
      this._ids[this._ids.length - 1] = new IdRange(last.clock, last.len + length)
    } else {
      this.sorted = false
      this._ids.push(new IdRange(clock, length))
    }
  }

  /**
   * Return the list of id ranges, sorted and merged.
   *
   * @return {Array<IdRange>}
   */
  getIds () {
    const ids = this._ids
    if (!this.sorted) {
      this.sorted = true
      ids.sort((a, b) => a.clock - b.clock)
      if (ids.length > 0) { // robustness fix: the merge below crashed on an empty array
        // merge items without filtering or splicing the array
        // i is the current pointer
        // j refers to the current insert position for the pointed item
        // try to merge ids[i] into ids[j-1] or set ids[j]=ids[i]
        let i, j
        for (i = 1, j = 1; i < ids.length; i++) {
          const left = ids[j - 1]
          const right = ids[i]
          if (left.clock + left.len >= right.clock) {
            // overlapping or adjacent: extend `left` only if `right` reaches further
            const r = right.clock + right.len - left.clock
            if (left.len < r) {
              ids[j - 1] = new IdRange(left.clock, r)
            }
          } else if (left.len === 0) {
            // a zero-length placeholder is overwritten rather than kept
            ids[j - 1] = right
          } else {
            if (j < i) {
              ids[j] = right
            }
            j++
          }
        }
        // truncate to the merged prefix; drop a trailing zero-length range
        if (ids[j - 1].len === 0) {
          ids.length = j - 1
        } else {
          ids.length = j
        }
      }
    }
    return ids
  }
}
export class IdSet {
  /**
   * A set of ids, stored as lazily-normalized ranges per client.
   */
  constructor () {
    /**
     * Maps a client id to its IdRanges.
     * @type {Map<number,IdRanges>}
     */
    this.clients = new Map()
  }

  /**
   * Whether `id` is contained in this set.
   *
   * @param {ID} id
   * @return {boolean}
   */
  has (id) {
    const ranges = this.clients.get(id.client)
    if (ranges === undefined) {
      return false
    }
    return findIndexInIdRanges(ranges.getIds(), id.clock) !== null
  }
}
/**
 * Iterate over all structs that are mentioned by the IdSet.
 *
 * For each client in `ds`, normalizes its ranges via `getIds()` and calls `f`
 * on every struct of that client that falls into one of the ranges
 * (delegated to `iterateStructs`).
 *
 * @param {Transaction} transaction
 * @param {IdSet} ds
 * @param {function(GC|Item):void} f callback invoked per matched struct
 *
 * @function
 */
export const iterateStructsByIdSet = (transaction, ds, f) =>
  ds.clients.forEach((idRanges, clientid) => {
    const ranges = idRanges.getIds()
    // NOTE(review): assumes every client mentioned in `ds` also exists in the
    // struct store (the cast hides a possible undefined) — confirm at call sites
    const structs = /** @type {Array<GC|Item>} */ (transaction.doc.store.clients.get(clientid))
    for (let i = 0; i < ranges.length; i++) {
      const del = ranges[i]
      iterateStructs(transaction, structs, del.clock, del.len, f)
    }
  })
/**
 * Binary search for the IdRange that contains `clock`.
 *
 * @param {Array<IdRange>} dis sorted, non-overlapping id ranges
 * @param {number} clock
 * @return {number|null} index of the range covering `clock`, or null if none does
 *
 * @private
 * @function
 */
export const findIndexInIdRanges = (dis, clock) => {
  let lo = 0
  let hi = dis.length - 1
  while (lo <= hi) {
    const mid = (lo + hi) >>> 1
    const range = dis[mid]
    if (clock < range.clock) {
      // clock lies before this range — search the left half
      hi = mid - 1
    } else if (clock < range.clock + range.len) {
      // range.clock <= clock < range.clock + range.len — found it
      return mid
    } else {
      lo = mid + 1
    }
  }
  return null
}
/**
 * Merge several IdSets into one fresh IdSet. The input sets are not modified.
 *
 * @param {Array<IdSet>} idSets
 * @return {IdSet} A fresh IdSet
 */
export const mergeIdSets = idSets => {
  const merged = new IdSet()
  for (let dssI = 0; dssI < idSets.length; dssI++) {
    idSets[dssI].clients.forEach((rangesLeft, client) => {
      if (!merged.clients.has(client)) {
        // Write all missing keys from current ds and all following.
        // If merged already contains `client` current ds has already been added.
        const ids = rangesLeft.getIds().slice()
        for (let i = dssI + 1; i < idSets.length; i++) {
          const nextIds = idSets[i].clients.get(client)
          if (nextIds) {
            array.appendTo(ids, nextIds.getIds())
          }
        }
        // the new IdRanges starts unsorted; getIds() normalizes lazily on first read
        merged.clients.set(client, new IdRanges(ids))
      }
    })
  }
  return merged
}
/**
 * Remove all ranges from `exclude` from `ds`. The result is a fresh IdSet containing all ranges from `idSet` that are not
 * in `exclude`.
 *
 * Two-pointer sweep over both clients' normalized range lists.
 * Neither input is modified.
 *
 * @param {IdSet} idSet
 * @param {IdSet} exclude
 * @return {IdSet}
 */
export const diffIdSets = (idSet, exclude) => {
  const res = new IdSet()
  idSet.clients.forEach((_idRanges, client) => {
    /**
     * @type {Array<IdRange>}
     */
    let resRanges = []
    const _excludedRanges = exclude.clients.get(client)
    const idRanges = _idRanges.getIds()
    if (_excludedRanges == null) {
      // nothing excluded for this client — copy all ranges
      resRanges = idRanges.slice()
    } else {
      const excludedRanges = _excludedRanges.getIds()
      let i = 0; let j = 0
      // currRange is the (possibly already trimmed) range currently being compared
      let currRange = idRanges[0]
      while (i < idRanges.length && j < excludedRanges.length) {
        const e = excludedRanges[j]
        if (currRange.clock + currRange.len <= e.clock) { // no overlapping, use next range item
          if (currRange.len > 0) resRanges.push(currRange)
          currRange = idRanges[++i]
        } else if (e.clock + e.len <= currRange.clock) { // no overlapping, use next excluded item
          j++
        } else if (e.clock <= currRange.clock) { // exclude laps into range (we already know that the ranges somehow collide)
          const newClock = e.clock + e.len
          const newLen = currRange.clock + currRange.len - newClock
          if (newLen > 0) {
            currRange = new IdRange(newClock, newLen)
            j++
          } else {
            // this item is completely overwritten. len=0. We can jump to the next range
            currRange = idRanges[++i]
          }
        } else { // currRange.clock < e.clock -- range laps into exclude => adjust len
          // beginning can't be empty, add it to the result
          const nextLen = e.clock - currRange.clock
          resRanges.push(new IdRange(currRange.clock, nextLen))
          // retain the remaining length after exclude in currRange
          currRange = new IdRange(currRange.clock + e.len + nextLen, math.max(currRange.len - e.len - nextLen, 0))
          if (currRange.len === 0) currRange = idRanges[++i]
          else j++
        }
      }
      // flush the remainder of the current range and all untouched trailing ranges
      if (currRange != null) {
        resRanges.push(currRange)
      }
      i++
      while (i < idRanges.length) {
        resRanges.push(idRanges[i++])
      }
    }
    if (resRanges.length > 0) res.clients.set(client, new IdRanges(resRanges))
  })
  return res
}
/**
 * Add the id range [clock, clock + length) for `client` to `idSet`.
 * Creates the client's IdRanges on first use.
 *
 * @param {IdSet} idSet
 * @param {number} client
 * @param {number} clock
 * @param {number} length
 *
 * @private
 * @function
 */
export const addToIdSet = (idSet, client, clock, length) => {
  const existing = idSet.clients.get(client)
  if (existing === undefined) {
    idSet.clients.set(client, new IdRanges([new IdRange(clock, length)]))
  } else {
    existing.add(clock, length)
  }
}
/**
 * Add the id range covered by `struct` (struct.id.clock .. +struct.length)
 * to `idSet`.
 *
 * @param {IdSet} idSet
 * @param {AbstractStruct} struct
 *
 * @private
 * @function
 */
export const addStructToIdSet = (idSet, struct) => {
  addToIdSet(idSet, struct.id.client, struct.id.clock, struct.length)
}

/**
 * @return {IdSet} a fresh, empty IdSet
 */
export const createIdSet = () => new IdSet()
/**
 * Build an IdSet covering every deleted struct in the struct store.
 * (Keeps the historical "DeleteSet" name for API compatibility.)
 *
 * @param {StructStore} ss
 * @return {IdSet}
 *
 * @private
 * @function
 */
export const createDeleteSetFromStructStore = ss => {
  const ds = createIdSet()
  ss.clients.forEach((structs, client) => {
    /**
     * @type {Array<IdRange>}
     */
    const dsitems = []
    for (let i = 0; i < structs.length; i++) {
      const struct = structs[i]
      if (struct.deleted) {
        const clock = struct.id.clock
        let len = struct.length
        // coalesce consecutive deleted structs into a single range
        // (structs are stored in clock order, so this yields a sorted, merged set)
        if (i + 1 < structs.length) {
          for (let next = structs[i + 1]; i + 1 < structs.length && next.deleted; next = structs[++i + 1]) {
            len += next.length
          }
        }
        dsitems.push(new IdRange(clock, len))
      }
    }
    if (dsitems.length > 0) {
      ds.clients.set(client, new IdRanges(dsitems))
    }
  })
  return ds
}
/**
 * Encode `idSet` with the given DS encoder: a client count, then per client
 * its id followed by its normalized ranges (clock/len pairs).
 *
 * @param {DSEncoderV1 | DSEncoderV2} encoder
 * @param {IdSet} idSet
 *
 * @private
 * @function
 */
export const writeIdSet = (encoder, idSet) => {
  encoding.writeVarUint(encoder.restEncoder, idSet.clients.size)
  // Ensure that the id set is written in a deterministic order
  // (clients sorted by descending id)
  array.from(idSet.clients.entries())
    .sort((a, b) => b[0] - a[0])
    .forEach(([client, _idRanges]) => {
      const idRanges = _idRanges.getIds()
      encoder.resetDsCurVal()
      encoding.writeVarUint(encoder.restEncoder, client)
      const len = idRanges.length
      encoding.writeVarUint(encoder.restEncoder, len)
      for (let i = 0; i < len; i++) {
        const item = idRanges[i]
        encoder.writeDsClock(item.clock)
        encoder.writeDsLen(item.len)
      }
    })
}
/**
 * Decode an IdSet (inverse of writeIdSet).
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @return {IdSet}
 *
 * @private
 * @function
 */
export const readIdSet = decoder => {
  const ds = new IdSet()
  const numClients = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numClients; i++) {
    decoder.resetDsCurVal()
    const client = decoding.readVarUint(decoder.restDecoder)
    const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
    if (numberOfDeletes > 0) {
      // only add a client entry if it actually has ranges
      /**
       * @type {Array<IdRange>}
       */
      const dsRanges = []
      for (let i = 0; i < numberOfDeletes; i++) {
        dsRanges.push(new IdRange(decoder.readDsClock(), decoder.readDsLen()))
      }
      ds.clients.set(client, new IdRanges(dsRanges))
    }
  }
  return ds
}
/**
* @todo YDecoder also contains references to String and other Decoders. Would make sense to exchange YDecoder.toUint8Array for YDecoder.DsToUint8Array()..
*/
/**
 * Read a delete set from `decoder` and apply as much of it as possible to the
 * store: structs already present are split at range boundaries and deleted.
 * Ranges that reach beyond the client's known state are collected and
 * re-encoded so the caller can retry them later.
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @param {Transaction} transaction
 * @param {StructStore} store
 * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
 *
 * @private
 * @function
 */
export const readAndApplyDeleteSet = (decoder, transaction, store) => {
  const unappliedDS = new IdSet()
  const numClients = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numClients; i++) {
    decoder.resetDsCurVal()
    const client = decoding.readVarUint(decoder.restDecoder)
    const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
    const structs = store.clients.get(client) || []
    const state = getState(store, client)
    for (let i = 0; i < numberOfDeletes; i++) {
      const clock = decoder.readDsClock()
      const clockEnd = clock + decoder.readDsLen()
      if (clock < state) {
        if (state < clockEnd) {
          // the range extends past what we know — keep the unknown tail for later
          addToIdSet(unappliedDS, client, state, clockEnd - state)
        }
        let index = findIndexSS(structs, clock)
        /**
         * We can ignore the case of GC and Delete structs, because we are going to skip them
         * @type {Item}
         */
        // @ts-ignore
        let struct = structs[index]
        // split the first item if necessary
        if (!struct.deleted && struct.id.clock < clock) {
          structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock))
          index++ // increase we now want to use the next struct
        }
        // delete every not-yet-deleted struct within [clock, clockEnd),
        // splitting the last one if the range ends inside it
        while (index < structs.length) {
          // @ts-ignore
          struct = structs[index++]
          if (struct.id.clock < clockEnd) {
            if (!struct.deleted) {
              if (clockEnd < struct.id.clock + struct.length) {
                structs.splice(index, 0, splitItem(transaction, struct, clockEnd - struct.id.clock))
              }
              struct.delete(transaction)
            }
          } else {
            break
          }
        }
      } else {
        // nothing of this range is known yet — keep it entirely
        addToIdSet(unappliedDS, client, clock, clockEnd - clock)
      }
    }
  }
  if (unappliedDS.clients.size > 0) {
    const ds = new UpdateEncoderV2()
    encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs
    writeIdSet(ds, unappliedDS)
    return ds.toUint8Array()
  }
  return null
}
/**
 * Structural equality of two IdSets. Both sides are normalized via `getIds()`
 * before ranges are compared index-by-index.
 *
 * @param {IdSet} ds1
 * @param {IdSet} ds2
 */
export const equalIdSets = (ds1, ds2) => {
  if (ds1.clients.size !== ds2.clients.size) return false
  for (const [client, ranges1] of ds1.clients.entries()) {
    const ids1 = ranges1.getIds()
    const ids2 = ds2.clients.get(client)?.getIds()
    if (ids2 === undefined || ids1.length !== ids2.length) return false
    for (let idx = 0; idx < ids1.length; idx++) {
      const a = ids1[idx]
      const b = ids2[idx]
      if (a.clock !== b.clock || a.len !== b.len) return false
    }
  }
  return true
}

View File

@@ -1,16 +1,15 @@
import {
YArray,
YMap,
readDeleteSet,
writeDeleteSet,
createDeleteSet,
DSEncoderV1, DSDecoderV1, ID, DeleteSet, YArrayEvent, Transaction, Doc // eslint-disable-line
readIdSet,
writeIdSet,
createIdSet,
mergeIdSets,
DSEncoderV1, DSDecoderV1, ID, IdSet, YArrayEvent, Transaction, Doc // eslint-disable-line
} from '../internals.js'
import * as decoding from 'lib0/decoding'
import { mergeDeleteSets, isDeleted } from './DeleteSet.js'
export class PermanentUserData {
/**
* @param {Doc} doc
@@ -18,7 +17,7 @@ export class PermanentUserData {
*/
constructor (doc, storeType = doc.getMap('users')) {
/**
* @type {Map<string,DeleteSet>}
* @type {Map<string,IdSet>}
*/
const dss = new Map()
this.yusers = storeType
@@ -45,12 +44,12 @@ export class PermanentUserData {
event.changes.added.forEach(item => {
item.content.getContent().forEach(encodedDs => {
if (encodedDs instanceof Uint8Array) {
this.dss.set(userDescription, mergeDeleteSets([this.dss.get(userDescription) || createDeleteSet(), readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs)))]))
this.dss.set(userDescription, mergeIdSets([this.dss.get(userDescription) || createIdSet(), readIdSet(new DSDecoderV1(decoding.createDecoder(encodedDs)))]))
}
})
})
})
this.dss.set(userDescription, mergeDeleteSets(ds.map(encodedDs => readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs))))))
this.dss.set(userDescription, mergeIdSets(ds.map(encodedDs => readIdSet(new DSDecoderV1(decoding.createDecoder(encodedDs))))))
ids.observe(/** @param {YArrayEvent<any>} event */ event =>
event.changes.added.forEach(item => item.content.getContent().forEach(addClientId))
)
@@ -71,7 +70,7 @@ export class PermanentUserData {
* @param {number} clientid
* @param {string} userDescription
* @param {Object} conf
* @param {function(Transaction, DeleteSet):boolean} [conf.filter]
* @param {function(Transaction, IdSet):boolean} [conf.filter]
*/
setUserMapping (doc, clientid, userDescription, { filter = () => true } = {}) {
const users = this.yusers
@@ -99,7 +98,7 @@ export class PermanentUserData {
const encoder = new DSEncoderV1()
const ds = this.dss.get(userDescription)
if (ds) {
writeDeleteSet(encoder, ds)
writeIdSet(encoder, ds)
user.get('ds').push([encoder.toUint8Array()])
}
}
@@ -111,7 +110,7 @@ export class PermanentUserData {
const ds = transaction.deleteSet
if (transaction.local && ds.clients.size > 0 && filter(transaction, ds)) {
const encoder = new DSEncoderV1()
writeDeleteSet(encoder, ds)
writeIdSet(encoder, ds)
yds.push([encoder.toUint8Array()])
}
})
@@ -132,7 +131,7 @@ export class PermanentUserData {
*/
getUserByDeletedId (id) {
for (const [userDescription, ds] of this.dss.entries()) {
if (isDeleted(ds, id)) {
if (ds.has(id)) {
return userDescription
}
}

View File

@@ -1,23 +1,22 @@
import {
isDeleted,
createDeleteSetFromStructStore,
getStateVector,
getItemCleanStart,
iterateDeletedStructs,
writeDeleteSet,
iterateStructsByIdSet,
writeIdSet,
writeStateVector,
readDeleteSet,
readIdSet,
readStateVector,
createDeleteSet,
createIdSet,
createID,
getState,
findIndexSS,
UpdateEncoderV2,
applyUpdateV2,
LazyStructReader,
equalDeleteSets,
UpdateDecoderV1, UpdateDecoderV2, DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item, // eslint-disable-line
mergeDeleteSets
equalIdSets,
UpdateDecoderV1, UpdateDecoderV2, DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, IdSet, Item, // eslint-disable-line
mergeIdSets
} from '../internals.js'
import * as map from 'lib0/map'
@@ -27,12 +26,12 @@ import * as encoding from 'lib0/encoding'
export class Snapshot {
/**
* @param {DeleteSet} ds
* @param {IdSet} ds
* @param {Map<number,number>} sv state map
*/
constructor (ds, sv) {
/**
* @type {DeleteSet}
* @type {IdSet}
*/
this.ds = ds
/**
@@ -49,11 +48,9 @@ export class Snapshot {
* @return {boolean}
*/
export const equalSnapshots = (snap1, snap2) => {
const ds1 = snap1.ds.clients
const ds2 = snap2.ds.clients
const sv1 = snap1.sv
const sv2 = snap2.sv
if (sv1.size !== sv2.size || ds1.size !== ds2.size) {
if (sv1.size !== sv2.size) {
return false
}
for (const [key, value] of sv1.entries()) {
@@ -61,20 +58,7 @@ export const equalSnapshots = (snap1, snap2) => {
return false
}
}
for (const [client, dsitems1] of ds1.entries()) {
const dsitems2 = ds2.get(client) || []
if (dsitems1.length !== dsitems2.length) {
return false
}
for (let i = 0; i < dsitems1.length; i++) {
const dsitem1 = dsitems1[i]
const dsitem2 = dsitems2[i]
if (dsitem1.clock !== dsitem2.clock || dsitem1.len !== dsitem2.len) {
return false
}
}
}
return true
return equalIdSets(snap1.ds, snap2.ds)
}
/**
@@ -83,7 +67,7 @@ export const equalSnapshots = (snap1, snap2) => {
* @return {Uint8Array}
*/
export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => {
writeDeleteSet(encoder, snapshot.ds)
writeIdSet(encoder, snapshot.ds)
writeStateVector(encoder, snapshot.sv)
return encoder.toUint8Array()
}
@@ -100,7 +84,7 @@ export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncod
* @return {Snapshot}
*/
export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => {
return new Snapshot(readDeleteSet(decoder), readStateVector(decoder))
return new Snapshot(readIdSet(decoder), readStateVector(decoder))
}
/**
@@ -110,13 +94,13 @@ export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createD
export const decodeSnapshot = buf => decodeSnapshotV2(buf, new DSDecoderV1(decoding.createDecoder(buf)))
/**
* @param {DeleteSet} ds
* @param {IdSet} ds
* @param {Map<number,number>} sm
* @return {Snapshot}
*/
export const createSnapshot = (ds, sm) => new Snapshot(ds, sm)
export const emptySnapshot = createSnapshot(createDeleteSet(), new Map())
export const emptySnapshot = createSnapshot(createIdSet(), new Map())
/**
* @param {Doc} doc
@@ -133,7 +117,7 @@ export const snapshot = doc => createSnapshot(createDeleteSetFromStructStore(doc
*/
export const isVisible = (item, snapshot) => snapshot === undefined
? !item.deleted
: snapshot.sv.has(item.id.client) && (snapshot.sv.get(item.id.client) || 0) > item.id.clock && !isDeleted(snapshot.ds, item.id)
: snapshot.sv.has(item.id.client) && (snapshot.sv.get(item.id.client) || 0) > item.id.clock && !snapshot.ds.has(item.id)
/**
* @param {Transaction} transaction
@@ -149,7 +133,7 @@ export const splitSnapshotAffectedStructs = (transaction, snapshot) => {
getItemCleanStart(transaction, createID(client, clock))
}
})
iterateDeletedStructs(transaction, snapshot.ds, _item => {})
iterateStructsByIdSet(transaction, snapshot.ds, _item => {})
meta.add(snapshot)
}
}
@@ -203,7 +187,7 @@ export const createDocFromSnapshot = (originDoc, snapshot, newDoc = new Doc()) =
structs[i].write(encoder, 0)
}
}
writeDeleteSet(encoder, ds)
writeIdSet(encoder, ds)
})
applyUpdateV2(newDoc, encoder.toUint8Array(), 'snapshot')
@@ -225,8 +209,8 @@ export const snapshotContainsUpdateV2 = (snapshot, update, YDecoder = UpdateDeco
return false
}
}
const mergedDS = mergeDeleteSets([snapshot.ds, readDeleteSet(updateDecoder)])
return equalDeleteSets(snapshot.ds, mergedDS)
const mergedDS = mergeIdSets([snapshot.ds, readIdSet(updateDecoder)])
return equalIdSets(snapshot.ds, mergedDS)
}
/**

View File

@@ -1,19 +1,16 @@
import {
getState,
writeStructsFromTransaction,
writeDeleteSet,
DeleteSet,
sortAndMergeDeleteSet,
writeIdSet,
getStateVector,
findIndexSS,
callEventHandlerListeners,
createIdSet,
Item,
generateNewClientId,
createID,
cleanupYTextAfterTransaction,
isDeleted,
UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc, // eslint-disable-line
DeleteItem
IdSet, UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
} from '../internals.js'
import * as error from 'lib0/error'
@@ -62,14 +59,12 @@ export class Transaction {
this.doc = doc
/**
* Describes the set of deleted items by ids
* @type {DeleteSet}
*/
this.deleteSet = new DeleteSet()
this.deleteSet = createIdSet()
/**
* Describes the set of inserted items by ids
* @type {DeleteSet}
*/
this.insertSet = new DeleteSet()
this.insertSet = createIdSet()
/**
* Holds the state before the transaction started.
* @type {Map<Number,Number>?}
@@ -140,7 +135,7 @@ export class Transaction {
if (this._beforeState == null) {
const sv = getStateVector(this.doc.store)
this.insertSet.clients.forEach((ranges, client) => {
sv.set(client, ranges[0].clock)
sv.set(client, ranges.getIds()[0].clock)
})
this._beforeState = sv
}
@@ -157,7 +152,8 @@ export class Transaction {
if (!this._done) error.unexpectedCase()
if (this._afterState == null) {
const sv = getStateVector(this.doc.store)
this.insertSet.clients.forEach((ranges, client) => {
this.insertSet.clients.forEach((_ranges, client) => {
const ranges = _ranges.getIds()
const d = ranges[ranges.length - 1]
sv.set(client, d.clock + d.len)
})
@@ -176,9 +172,8 @@ export const writeUpdateMessageFromTransaction = (encoder, transaction) => {
if (transaction.deleteSet.clients.size === 0 && transaction.insertSet.clients.size === 0) {
return false
}
sortAndMergeDeleteSet(transaction.deleteSet)
writeStructsFromTransaction(encoder, transaction)
writeDeleteSet(encoder, transaction.deleteSet)
writeIdSet(encoder, transaction.deleteSet)
return true
}
@@ -203,28 +198,11 @@ export const nextID = transaction => {
*/
export const addChangedTypeToTransaction = (transaction, type, parentSub) => {
const item = type._item
if (item === null || (!item.deleted && !isDeleted(transaction.insertSet, item.id))) {
if (item === null || (!item.deleted && !transaction.insertSet.has(item.id))) {
map.setIfUndefined(transaction.changed, type, set.create).add(parentSub)
}
}
/**
* @param {Transaction} tr
* @param {AbstractStruct} item
*/
export const addItemToInsertSet = (tr, item) => {
const ranges = map.setIfUndefined(tr.insertSet.clients, item.id.client, () => /** @type {Array<import('./DeleteSet.js').DeleteItem>} */ ([]))
if (ranges.length > 0) {
const r = ranges[ranges.length - 1]
if (r.clock + r.len === item.id.clock) {
// @ts-ignore
r.len += item.length
return
}
}
ranges.push(new DeleteItem(item.id.clock, item.length))
}
/**
* @param {Array<AbstractStruct>} structs
* @param {number} pos
@@ -254,12 +232,13 @@ const tryToMergeWithLefts = (structs, pos) => {
}
/**
* @param {DeleteSet} ds
* @param {IdSet} ds
* @param {StructStore} store
* @param {function(Item):boolean} gcFilter
*/
const tryGcDeleteSet = (ds, store, gcFilter) => {
for (const [client, deleteItems] of ds.clients.entries()) {
for (const [client, _deleteItems] of ds.clients.entries()) {
const deleteItems = _deleteItems.getIds()
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
for (let di = deleteItems.length - 1; di >= 0; di--) {
const deleteItem = deleteItems[di]
@@ -282,13 +261,14 @@ const tryGcDeleteSet = (ds, store, gcFilter) => {
}
/**
* @param {DeleteSet} ds
* @param {IdSet} ds
* @param {StructStore} store
*/
const tryMergeDeleteSet = (ds, store) => {
// try to merge deleted / gc'd items
// merge from right to left for better efficiency and so we don't miss any merge targets
ds.clients.forEach((deleteItems, client) => {
ds.clients.forEach((_deleteItems, client) => {
const deleteItems = _deleteItems.getIds()
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
for (let di = deleteItems.length - 1; di >= 0; di--) {
const deleteItem = deleteItems[di]
@@ -306,7 +286,7 @@ const tryMergeDeleteSet = (ds, store) => {
}
/**
* @param {DeleteSet} ds
* @param {IdSet} ds
* @param {StructStore} store
* @param {function(Item):boolean} gcFilter
*/
@@ -326,11 +306,8 @@ const cleanupTransactions = (transactionCleanups, i) => {
const doc = transaction.doc
const store = doc.store
const ds = transaction.deleteSet
const insertSet = transaction.insertSet
const mergeStructs = transaction._mergeStructs
try {
sortAndMergeDeleteSet(ds)
sortAndMergeDeleteSet(insertSet)
doc.emit('beforeObserverCalls', [transaction, doc])
/**
* An array of event callbacks.
@@ -388,7 +365,7 @@ const cleanupTransactions = (transactionCleanups, i) => {
// on all affected store.clients props, try to merge
transaction.insertSet.clients.forEach((ids, client) => {
const firstClock = ids[0].clock
const firstClock = ids.getIds()[0].clock
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
// we iterate from right to left so we can safely remove entries
const firstChangePos = math.max(findIndexSS(structs, firstClock), 1)

View File

@@ -1,6 +1,6 @@
import {
mergeDeleteSets,
iterateDeletedStructs,
mergeIdSets,
iterateStructsByIdSet,
keepItem,
transact,
createID,
@@ -8,9 +8,7 @@ import {
isParentOf,
followRedone,
getItemCleanStart,
isDeleted,
addToDeleteSet,
YEvent, Transaction, Doc, Item, GC, DeleteSet, AbstractType // eslint-disable-line
YEvent, Transaction, Doc, Item, GC, IdSet, AbstractType // eslint-disable-line
} from '../internals.js'
import * as time from 'lib0/time'
@@ -20,8 +18,8 @@ import { ObservableV2 } from 'lib0/observable'
export class StackItem {
/**
* @param {DeleteSet} deletions
* @param {DeleteSet} insertions
* @param {IdSet} deletions
* @param {IdSet} insertions
*/
constructor (deletions, insertions) {
this.insertions = insertions
@@ -38,7 +36,7 @@ export class StackItem {
* @param {StackItem} stackItem
*/
const clearUndoManagerStackItem = (tr, um, stackItem) => {
iterateDeletedStructs(tr, stackItem.deletions, item => {
iterateStructsByIdSet(tr, stackItem.deletions, item => {
if (item instanceof Item && um.scope.some(type => type === tr.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
keepItem(item, false)
}
@@ -72,7 +70,7 @@ const popStackItem = (undoManager, stack, eventType) => {
*/
const itemsToDelete = []
let performedChange = false
iterateDeletedStructs(transaction, stackItem.insertions, struct => {
iterateStructsByIdSet(transaction, stackItem.insertions, struct => {
if (struct instanceof Item) {
if (struct.redone !== null) {
let { item, diff } = followRedone(store, struct.id)
@@ -86,12 +84,12 @@ const popStackItem = (undoManager, stack, eventType) => {
}
}
})
iterateDeletedStructs(transaction, stackItem.deletions, struct => {
iterateStructsByIdSet(transaction, stackItem.deletions, struct => {
if (
struct instanceof Item &&
scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), struct)) &&
// Never redo structs in stackItem.insertions because they were created and deleted in the same capture interval.
!isDeleted(stackItem.insertions, struct.id)
!stackItem.insertions.has(struct.id)
) {
itemsToRedo.add(struct)
}
@@ -232,8 +230,8 @@ export class UndoManager extends ObservableV2 {
if (this.lastChange > 0 && now - this.lastChange < this.captureTimeout && stack.length > 0 && !undoing && !redoing) {
// append change to last stack op
const lastOp = stack[stack.length - 1]
lastOp.deletions = mergeDeleteSets([lastOp.deletions, transaction.deleteSet])
lastOp.insertions = mergeDeleteSets([lastOp.insertions, insertions])
lastOp.deletions = mergeIdSets([lastOp.deletions, transaction.deleteSet])
lastOp.insertions = mergeIdSets([lastOp.insertions, insertions])
} else {
// create a new stack op
stack.push(new StackItem(transaction.deleteSet, insertions))
@@ -243,7 +241,7 @@ export class UndoManager extends ObservableV2 {
this.lastChange = now
}
// make sure that deleted structs are not gc'd
iterateDeletedStructs(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
iterateStructsByIdSet(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
if (item instanceof Item && this.scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
keepItem(item, true)
}

View File

@@ -1,5 +1,4 @@
import {
isDeleted,
Item, AbstractType, Transaction, AbstractStruct // eslint-disable-line
} from '../internals.js'
@@ -78,7 +77,7 @@ export class YEvent {
* @return {boolean}
*/
deletes (struct) {
return isDeleted(this.transaction.deleteSet, struct.id)
return this.transaction.deleteSet.has(struct.id)
}
/**
@@ -158,7 +157,7 @@ export class YEvent {
* @return {boolean}
*/
adds (struct) {
return isDeleted(this.transaction.insertSet, struct.id)
return this.transaction.insertSet.has(struct.id)
}
/**

View File

@@ -20,7 +20,7 @@ import {
createID,
getStateVector,
readAndApplyDeleteSet,
writeDeleteSet,
writeIdSet,
createDeleteSetFromStructStore,
transact,
readItemContent,
@@ -36,8 +36,7 @@ import {
Skip,
diffUpdateV2,
convertUpdateFormatV2ToV1,
DeleteSet, DSDecoderV2, Doc, Transaction, GC, Item, StructStore, // eslint-disable-line
iterateDeletedStructs
IdSet, DSDecoderV2, Doc, Transaction, GC, Item, StructStore, // eslint-disable-line
} from '../internals.js'
import * as encoding from 'lib0/encoding'
@@ -105,7 +104,7 @@ export const writeClientsStructs = (encoder, store, _sm) => {
/**
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
* @param {StructStore} store
* @param {DeleteSet} idset
* @param {IdSet} idset
*
* @todo at the moment this writes the full deleteset range
*
@@ -118,7 +117,7 @@ export const writeStructsFromIdSet = (encoder, store, idset) => {
// Write items with higher client ids first
// This heavily improves the conflict algorithm.
array.from(idset.clients.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, ids]) => {
writeStructs(encoder, /** @type {Array<GC|Item>} */ (store.clients.get(client)), client, ids[0].clock)
writeStructs(encoder, /** @type {Array<GC|Item>} */ (store.clients.get(client)), client, ids.getIds()[0].clock)
})
}
@@ -524,7 +523,7 @@ export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(yd
*/
export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) => {
writeClientsStructs(encoder, doc.store, targetStateVector)
writeDeleteSet(encoder, createDeleteSetFromStructStore(doc.store))
writeIdSet(encoder, createDeleteSetFromStructStore(doc.store))
}
/**

View File

@@ -24,15 +24,15 @@ import {
DSEncoderV2,
GC,
Item,
mergeDeleteSets,
readDeleteSet,
mergeIdSets,
readIdSet,
readItemContent,
Skip,
UpdateDecoderV1,
UpdateDecoderV2,
UpdateEncoderV1,
UpdateEncoderV2,
writeDeleteSet,
writeIdSet,
YXmlElement,
YXmlHook
} from '../internals.js'
@@ -128,7 +128,7 @@ export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
structs.push(curr)
}
logging.print('Structs: ', structs)
const ds = readDeleteSet(updateDecoder)
const ds = readIdSet(updateDecoder)
logging.print('DeleteSet: ', ds)
}
@@ -152,7 +152,7 @@ export const decodeUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
}
return {
structs,
ds: readDeleteSet(updateDecoder)
ds: readIdSet(updateDecoder)
}
}
@@ -452,9 +452,9 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U
}
finishLazyStructWriting(lazyStructEncoder)
const dss = updateDecoders.map(decoder => readDeleteSet(decoder))
const ds = mergeDeleteSets(dss)
writeDeleteSet(updateEncoder, ds)
const dss = updateDecoders.map(decoder => readIdSet(decoder))
const ds = mergeIdSets(dss)
writeIdSet(updateEncoder, ds)
return updateEncoder.toUint8Array()
}
@@ -495,8 +495,8 @@ export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder =
}
finishLazyStructWriting(lazyStructWriter)
// write ds
const ds = readDeleteSet(decoder)
writeDeleteSet(encoder, ds)
const ds = readIdSet(decoder)
writeIdSet(encoder, ds)
return encoder.toUint8Array()
}
@@ -585,8 +585,8 @@ export const convertUpdateFormat = (update, blockTransformer, YDecoder, YEncoder
writeStructToLazyStructWriter(lazyWriter, blockTransformer(curr), 0)
}
finishLazyStructWriting(lazyWriter)
const ds = readDeleteSet(updateDecoder)
writeDeleteSet(updateEncoder, ds)
const ds = readIdSet(updateDecoder)
writeIdSet(updateEncoder, ds)
return updateEncoder.toUint8Array()
}

195
tests/IdSet.tests.js Normal file
View File

@@ -0,0 +1,195 @@
import * as t from 'lib0/testing'
import * as d from '../src/utils/IdSet.js'
import * as prng from 'lib0/prng'
import * as math from 'lib0/math'
/**
 * Build an IdSet by adding each (client, clock, len) triple in order.
 * Used by the tests below to construct expected / input sets concisely.
 *
 * @param {Array<[number, number, number]>} ops - triples of [client, clock, len]
 * @return {d.IdSet}
 */
const simpleConstructIdSet = ops => {
  const ds = d.createIdSet()
  ops.forEach(op => {
    d.addToIdSet(ds, op[0], op[1], op[2])
  })
  return ds
}
/**
* @param {d.IdSet} idSet1
* @param {d.IdSet} idSet2
*/
const compareIdSets = (idSet1, idSet2) => {
if (idSet1.clients.size !== idSet2.clients.size) return false
for (const [client, _items1] of idSet1.clients.entries()) {
const items1 = _items1.getIds()
const items2 = idSet2.clients.get(client)?.getIds()
t.assert(items2 !== undefined && items1.length === items2.length)
for (let i = 0; i < items1.length; i++) {
const di1 = items1[i]
const di2 = /** @type {Array<d.IdRange>} */ (items2)[i]
t.assert(di1.clock === di2.clock && di1.len === di2.len)
}
}
return true
}
/**
* @param {t.TestCase} _tc
*/
export const testIdsetMerge = _tc => {
t.group('filter out empty items (1))', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 0]]),
simpleConstructIdSet([])
)
})
t.group('filter out empty items (2))', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 0], [0, 2, 0]]),
simpleConstructIdSet([])
)
})
t.group('filter out empty items (3 - end))', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 1], [0, 2, 0]]),
simpleConstructIdSet([[0, 1, 1]])
)
})
t.group('filter out empty items (4 - middle))', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 1], [0, 2, 0], [0, 3, 1]]),
simpleConstructIdSet([[0, 1, 1], [0, 3, 1]])
)
})
t.group('filter out empty items (5 - beginning))', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 0], [0, 2, 1], [0, 3, 1]]),
simpleConstructIdSet([[0, 2, 1], [0, 3, 1]])
)
})
t.group('merge of overlapping id ranges', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 2], [0, 0, 2]]),
simpleConstructIdSet([[0, 0, 3]])
)
})
t.group('construct without hole', () => {
compareIdSets(
simpleConstructIdSet([[0, 1, 2], [0, 3, 1]]),
simpleConstructIdSet([[0, 1, 3]])
)
})
}
/**
* @param {t.TestCase} _tc
*/
export const testDiffing = _tc => {
t.group('simple case (1))', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 1], [0, 3, 1]]),
simpleConstructIdSet([[0, 3, 1]])
),
simpleConstructIdSet([[0, 1, 1]])
)
})
t.group('subset left', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 1, 1]])
),
simpleConstructIdSet([[0, 2, 2]])
)
})
t.group('subset right', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 3, 1]])
),
simpleConstructIdSet([[0, 1, 2]])
)
})
t.group('subset middle', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 2, 1]])
),
simpleConstructIdSet([[0, 1, 1], [0, 3, 1]])
)
})
t.group('overlapping left', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 0, 2]])
),
simpleConstructIdSet([[0, 2, 2]])
)
})
t.group('overlapping right', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 3, 5]])
),
simpleConstructIdSet([[0, 1, 2]])
)
})
t.group('overlapping completely', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3]]),
simpleConstructIdSet([[0, 0, 5]])
),
simpleConstructIdSet([])
)
})
t.group('overlapping into new range', () => {
compareIdSets(
d.diffIdSets(
simpleConstructIdSet([[0, 1, 3], [0, 5, 2]]),
simpleConstructIdSet([[0, 0, 6]])
),
simpleConstructIdSet([[0, 6, 1]])
)
})
}
/**
 * Generate a pseudo-random IdSet for property-based diff/merge testing.
 *
 * @param {prng.PRNG} gen
 * @param {number} clients
 * @param {number} clockRange (max clock - exclusive - by each client)
 */
const createRandomDiffSet = (gen, clients, clockRange) => {
  const maxOpLen = 5
  // enough ops that, on average, the full client×clock space could be covered
  const numOfOps = math.ceil((clients * clockRange) / maxOpLen)
  const ds = d.createIdSet()
  for (let i = 0; i < numOfOps; i++) {
    const client = prng.uint32(gen, 0, clients - 1)
    const clockStart = prng.uint32(gen, 0, clockRange)
    const len = prng.uint32(gen, 0, clockRange - clockStart)
    d.addToIdSet(ds, client, clockStart, len)
  }
  // Sometimes drop one client entirely so diffs exercise the missing-client path.
  // NOTE(review): upper bound `clients` here (vs `clients - 1` above) may pick a
  // non-existent client id, turning the delete into a no-op — confirm whether
  // prng.uint32's upper bound is inclusive and whether this is intended.
  if (ds.clients.size === clients && clients > 1 && prng.bool(gen)) {
    ds.clients.delete(prng.uint32(gen, 0, clients))
  }
  return ds
}
/**
 * Property test for the diff/merge invariant:
 * diff(ds1, ds2) must equal diff(merge([ds1, ds2]), ds2),
 * i.e. (ds1 \ ds2) == ((ds1 ∪ ds2) \ ds2) on random inputs.
 *
 * @param {t.TestCase} tc
 */
export const testRepeatRandomDiffing = tc => {
  const clients = 4
  const clockRange = 100
  const ds1 = createRandomDiffSet(tc.prng, clients, clockRange)
  const ds2 = createRandomDiffSet(tc.prng, clients, clockRange)
  const merged = d.mergeIdSets([ds1, ds2])
  const e1 = d.diffIdSets(ds1, ds2)
  const e2 = d.diffIdSets(merged, ds2)
  compareIdSets(e1, e2)
}

View File

@@ -1,195 +0,0 @@
import * as t from 'lib0/testing'
import * as d from '../src/utils/DeleteSet.js'
import * as prng from 'lib0/prng'
import * as math from 'lib0/math'
/**
* @param {Array<[number, number, number]>} ops
*/
const simpleConstructDs = ops => {
const ds = new d.DeleteSet()
ops.forEach(op => {
d.addToDeleteSet(ds, op[0], op[1], op[2])
})
d.sortAndMergeDeleteSet(ds)
return ds
}
/**
* @param {d.DeleteSet} ds1
* @param {d.DeleteSet} ds2
*/
const compareDs = (ds1, ds2) => {
t.assert(ds1.clients.size === ds2.clients.size)
ds1.clients.forEach((ranges1, clientid) => {
const ranges2 = ds2.clients.get(clientid) ?? []
t.assert(ranges1.length === ranges2?.length)
for (let i = 0; i < ranges1.length; i++) {
const d1 = ranges1[i]
const d2 = ranges2[i]
t.assert(d1.len === d2.len && d1.clock == d2.clock)
}
})
}
/**
* @param {t.TestCase} _tc
*/
export const testDeletesetMerge = _tc => {
t.group('filter out empty items (1))', () => {
compareDs(
simpleConstructDs([[0, 1, 0]]),
simpleConstructDs([])
)
})
t.group('filter out empty items (2))', () => {
compareDs(
simpleConstructDs([[0, 1, 0], [0, 2, 0]]),
simpleConstructDs([])
)
})
t.group('filter out empty items (3 - end))', () => {
compareDs(
simpleConstructDs([[0, 1, 1], [0, 2, 0]]),
simpleConstructDs([[0, 1, 1]])
)
})
t.group('filter out empty items (4 - middle))', () => {
compareDs(
simpleConstructDs([[0, 1, 1], [0, 2, 0], [0, 3, 1]]),
simpleConstructDs([[0, 1, 1], [0, 3, 1]])
)
})
t.group('filter out empty items (5 - beginning))', () => {
compareDs(
simpleConstructDs([[0, 1, 0], [0, 2, 1], [0, 3, 1]]),
simpleConstructDs([[0, 2, 1], [0, 3, 1]])
)
})
t.group('merge of overlapping deletes', () => {
compareDs(
simpleConstructDs([[0, 1, 2], [0, 0, 2]]),
simpleConstructDs([[0, 0, 3]])
)
})
t.group('construct without hole', () => {
compareDs(
simpleConstructDs([[0, 1, 2], [0, 3, 1]]),
simpleConstructDs([[0, 1, 3]])
)
})
}
/**
* @param {t.TestCase} _tc
*/
export const testDeletesetDiffing = _tc => {
t.group('simple case (1))', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 1], [0, 3, 1]]),
simpleConstructDs([[0, 3, 1]])
),
simpleConstructDs([[0, 1, 1]])
)
})
t.group('subset left', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 1, 1]])
),
simpleConstructDs([[0, 2, 2]])
)
})
t.group('subset right', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 3, 1]])
),
simpleConstructDs([[0, 1, 2]])
)
})
t.group('subset middle', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 2, 1]])
),
simpleConstructDs([[0, 1, 1], [0, 3, 1]])
)
})
t.group('overlapping left', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 0, 2]])
),
simpleConstructDs([[0, 2, 2]])
)
})
t.group('overlapping right', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 3, 5]])
),
simpleConstructDs([[0, 1, 2]])
)
})
t.group('overlapping completely', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3]]),
simpleConstructDs([[0, 0, 5]])
),
simpleConstructDs([])
)
})
t.group('overlapping into new range', () => {
compareDs(
d.diffDeleteSet(
simpleConstructDs([[0, 1, 3], [0, 5, 2]]),
simpleConstructDs([[0, 0, 6]])
),
simpleConstructDs([[0, 6, 1]])
)
})
}
/**
* @param {prng.PRNG} gen
* @param {number} clients
* @param {number} clockRange (max clock - exclusive - by each client)
*/
const createRandomDiffSet = (gen, clients, clockRange) => {
const maxOpLen = 5
const numOfOps = math.ceil((clients * clockRange) / maxOpLen)
const ds = new d.DeleteSet()
for (let i = 0; i < numOfOps; i++) {
const client = prng.uint32(gen, 0, clients - 1)
const clockStart = prng.uint32(gen, 0, clockRange)
const len = prng.uint32(gen, 0, clockRange - clockStart)
d.addToDeleteSet(ds, client, clockStart, len)
}
d.sortAndMergeDeleteSet(ds)
if (ds.clients.size === clients && clients > 1 && prng.bool(gen)) {
ds.clients.delete(prng.uint32(gen, 0, clients))
}
return ds
}
/**
* @param {t.TestCase} tc
*/
export const testRepeatRandomDiffing = tc => {
const clients = 4
const clockRange = 100
const ds1 = createRandomDiffSet(tc.prng, clients, clockRange)
const ds2 = createRandomDiffSet(tc.prng, clients, clockRange)
const merged = d.mergeDeleteSets([ds1, ds2])
const e1 = d.diffDeleteSet(ds1, ds2)
const e2 = d.diffDeleteSet(merged, ds2)
compareDs(e1, e2)
}

View File

@@ -8,4 +8,3 @@ export const testDelta = _tc => {
const d = delta.create().insert('hello').insert(' ').useAttributes({ bold: true }).insert('world').useAttribution({ creator: 'tester' }).insert('!').done()
t.compare(d.toJSON().ops, [{ insert: 'hello ' }, { insert: 'world', attributes: { bold: true } }, { insert: '!', attributes: { bold: true }, attribution: { creator: 'tester' } }])
}

View File

@@ -12,7 +12,7 @@ import * as snapshot from './snapshot.tests.js'
import * as updates from './updates.tests.js'
import * as relativePositions from './relativePositions.tests.js'
import * as delta from './delta.tests.js'
import * as deleteset from './deleteset.tests.js'
import * as idset from './IdSet.tests.js'
import { runTests } from 'lib0/testing'
import { isBrowser, isNode } from 'lib0/environment'
@@ -23,7 +23,7 @@ if (isBrowser) {
}
const tests = {
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions, delta, deleteset
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions, delta, idset
}
const run = async () => {

View File

@@ -6,6 +6,7 @@ import * as syncProtocol from 'y-protocols/sync'
import * as object from 'lib0/object'
import * as map from 'lib0/map'
import * as Y from '../src/index.js'
export * from '../src/index.js'
if (typeof window !== 'undefined') {
@@ -93,7 +94,8 @@ export class TestYInstance extends Y.Doc {
this.updates.push(update)
})
this.on('afterTransaction', tr => {
if (Array.from(tr.insertSet.clients.values()).some(ids => ids.length !== 1)) {
// @ts-ignore
if (Array.from(tr.insertSet.clients.values()).some(ids => ids._ids.length !== 1)) {
throw new Error('Currently, we expect that idset contains exactly one item per client.')
}
})
@@ -360,7 +362,7 @@ export const compare = users => {
return true
})
t.compare(Y.encodeStateVector(users[i]), Y.encodeStateVector(users[i + 1]))
Y.equalDeleteSets(Y.createDeleteSetFromStructStore(users[i].store), Y.createDeleteSetFromStructStore(users[i + 1].store))
Y.equalIdSets(Y.createDeleteSetFromStructStore(users[i].store), Y.createDeleteSetFromStructStore(users[i + 1].store))
compareStructStores(users[i].store, users[i + 1].store)
t.compare(Y.encodeSnapshot(Y.snapshot(users[i])), Y.encodeSnapshot(Y.snapshot(users[i + 1])))
}

View File

@@ -1,7 +1,7 @@
import * as t from 'lib0/testing'
import * as Y from '../src/index.js'
import { init, compare } from './testHelper.js' // eslint-disable-line
import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js'
import { readClientsStructRefs, readIdSet, UpdateDecoderV2, UpdateEncoderV2, writeIdSet } from '../src/internals.js'
import * as encoding from 'lib0/encoding'
import * as decoding from 'lib0/decoding'
import * as object from 'lib0/object'
@@ -193,10 +193,10 @@ const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
const decoder = decoding.createDecoder(diffed)
const updateDecoder = new UpdateDecoderV2(decoder)
readClientsStructRefs(updateDecoder, new Y.Doc())
const ds = readDeleteSet(updateDecoder)
const ds = readIdSet(updateDecoder)
const updateEncoder = new UpdateEncoderV2()
encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs
writeDeleteSet(updateEncoder, ds)
writeIdSet(updateEncoder, ds)
const deletesUpdate = updateEncoder.toUint8Array()
const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged])
if (!hasDeletes || enc !== encDoc) {