implement StructSet abstraction

This commit is contained in:
Kevin Jahns
2025-06-05 14:52:55 +02:00
parent c37ee3ee8c
commit e62e1d7c53
14 changed files with 257 additions and 165 deletions

View File

@@ -105,7 +105,7 @@ export {
IdMap,
createIdMap,
createAttributionItem,
createInsertionSetFromStructStore,
createInsertSetFromStructStore as createInsertionSetFromStructStore,
diffIdMap,
diffIdSet,
AttributionItem as Attribution,

View File

@@ -16,6 +16,9 @@ export * from './utils/Transaction.js'
export * from './utils/UndoManager.js'
export * from './utils/updates.js'
export * from './utils/YEvent.js'
export * from './utils/StructSet.js'
export * from './utils/IdMap.js'
export * from './utils/AttributionManager.js'
export * from './types/AbstractType.js'
export * from './types/YArray.js'
@@ -40,5 +43,3 @@ export * from './structs/ContentString.js'
export * from './structs/ContentType.js'
export * from './structs/Item.js'
export * from './structs/Skip.js'
export * from './utils/IdMap.js'
export * from './utils/AttributionManager.js'

View File

@@ -74,7 +74,7 @@ export const keepItem = (item, keep) => {
/**
* Split leftItem into two items
* @param {Transaction} transaction
* @param {Transaction?} transaction
* @param {Item} leftItem
* @param {number} diff
* @return {Item}
@@ -104,17 +104,19 @@ export const splitItem = (transaction, leftItem, diff) => {
if (leftItem.redone !== null) {
rightItem.redone = createID(leftItem.redone.client, leftItem.redone.clock + diff)
}
// update left (do not set leftItem.rightOrigin as it will lead to problems when syncing)
leftItem.right = rightItem
// update right
if (rightItem.right !== null) {
rightItem.right.left = rightItem
}
// right is more specific.
transaction._mergeStructs.push(rightItem)
// update parent._map
if (rightItem.parentSub !== null && rightItem.right === null) {
/** @type {AbstractType<any>} */ (rightItem.parent)._map.set(rightItem.parentSub, rightItem)
if (transaction != null) {
// update left (do not set leftItem.rightOrigin as it will lead to problems when syncing)
leftItem.right = rightItem
// update right
if (rightItem.right !== null) {
rightItem.right.left = rightItem
}
// right is more specific.
transaction._mergeStructs.push(rightItem)
// update parent._map
if (rightItem.parentSub !== null && rightItem.right === null) {
/** @type {AbstractType<any>} */ (rightItem.parent)._map.set(rightItem.parentSub, rightItem)
}
}
leftItem.length = diff
return rightItem

View File

@@ -249,7 +249,7 @@ const insertNegatedAttributes = (transaction, parent, currPos, negatedAttributes
// check if we really need to remove attributes
while (
currPos.right !== null && (
(currPos.right.deleted && (currPos.am == noAttributionsManager || currPos.am.contentLength(currPos.right) === 0)) || (
(currPos.right.deleted && (currPos.am === noAttributionsManager || currPos.am.contentLength(currPos.right) === 0)) || (
currPos.right.content.constructor === ContentFormat &&
equalAttrs(negatedAttributes.get(/** @type {ContentFormat} */ (currPos.right.content).key), /** @type {ContentFormat} */ (currPos.right.content).value)
)

View File

@@ -1,7 +1,7 @@
import {
getItem,
diffIdSet,
createInsertionSetFromStructStore,
createInsertSetFromStructStore,
createDeleteSetFromStructStore,
createIdMapFromIdSet,
ContentDeleted,
@@ -236,8 +236,8 @@ export class DiffAttributionManager extends ObservableV2 {
*/
constructor (prevDoc, nextDoc) {
super()
const _nextDocInserts = createInsertionSetFromStructStore(nextDoc.store, false) // unmaintained
const _prevDocInserts = createInsertionSetFromStructStore(prevDoc.store, false) // unmaintained
const _nextDocInserts = createInsertSetFromStructStore(nextDoc.store, false) // unmaintained
const _prevDocInserts = createInsertSetFromStructStore(prevDoc.store, false) // unmaintained
const nextDocDeletes = createDeleteSetFromStructStore(nextDoc.store) // maintained
const prevDocDeletes = createDeleteSetFromStructStore(prevDoc.store) // maintained
this.inserts = createIdMapFromIdSet(diffIdSet(_nextDocInserts, _prevDocInserts), [])
@@ -291,7 +291,7 @@ export class DiffAttributionManager extends ObservableV2 {
this._afterTrListener = nextDoc.on('afterTransaction', (tr) => {
// apply deletes on attributed deletes (content that is already deleted, but is rendered by
// the attribution manager)
if (!this.suggestionMode && tr.local && (this.suggestionOrigins == null || this.suggestionOrigins.some(o => o === origin))) {
if (!this.suggestionMode && tr.local && (this.suggestionOrigins == null || this.suggestionOrigins.some(o => o === tr.origin))) {
const attributedDeletes = tr.meta.get('attributedDeletes')
if (attributedDeletes != null) {
transact(prevDoc, () => {
@@ -456,7 +456,7 @@ export class SnapshotAttributionManager extends ObservableV2 {
* @return {number}
*/
contentLength (item) {
return item.content.isCountable()
return item.content.isCountable()
? (item.deleted
? this.attrs.sliceId(item.id, item.length).reduce((len, s) => s.attrs != null ? len + s.len : len, 0)
: item.length

View File

@@ -62,7 +62,7 @@ export class Doc extends ObservableV2 {
/**
* @param {DocOpts} opts configuration
*/
constructor ({ guid = random.uuidv4(), collectionid = null, gc = true, gcFilter = () => true, meta = null, autoLoad = false, shouldLoad = true, isSuggestionDoc = true } = {}) {
constructor ({ guid = random.uuidv4(), collectionid = null, gc = true, gcFilter = () => true, meta = null, autoLoad = false, shouldLoad = true, isSuggestionDoc = false} = {}) {
super()
this.gc = gc
this.gcFilter = gcFilter

View File

@@ -79,12 +79,18 @@ export class MaybeIdRange {
*/
export const createMaybeIdRange = (clock, len, exists) => new MaybeIdRange(clock, len, exists)
class IdRanges {
export class IdRanges {
/**
* @param {Array<IdRange>} ids
*/
constructor (ids) {
this.sorted = false
/**
* A typical use-case for IdSet is to append data. We heavily optimize this case by allowing the
* last item to be mutated if it isn't currently in use.
* This flag is true if the last item was exposed to the outside.
*/
this._lastIsUsed = false
/**
* @private
*/
@@ -102,7 +108,12 @@ class IdRanges {
add (clock, length) {
const last = this._ids[this._ids.length - 1]
if (last.clock + last.len === clock) {
this._ids[this._ids.length - 1] = new IdRange(last.clock, last.len + length)
if (this._lastIsUsed) {
this._ids[this._ids.length - 1] = new IdRange(last.clock, last.len + length)
this._lastIsUsed = false
} else {
this._ids[this._ids.length - 1].len += length
}
} else {
this.sorted = false
this._ids.push(new IdRange(clock, length))
@@ -110,10 +121,11 @@ class IdRanges {
}
/**
* Return the list of id ranges, sorted and merged.
* Return the list of immutable id ranges, sorted and merged.
*/
getIds () {
const ids = this._ids
this._lastIsUsed = true
if (!this.sorted) {
this.sorted = true
ids.sort((a, b) => a.clock - b.clock)
@@ -153,6 +165,10 @@ export class IdSet {
this.clients = new Map()
}
isEmpty () {
return this.clients.size === 0
}
/**
* @param {(idrange:IdRange, client:number) => void} f
*/
@@ -605,32 +621,42 @@ export const createDeleteSetFromStructStore = ss => {
return ds
}
/**
 * Compute the list of id ranges covered by `structs`, merging consecutive
 * structs into a single IdRange.
 *
 * Note: lengths are summed without checking clock adjacency, so `structs` is
 * assumed to be sorted and clock-contiguous (as per-client struct lists in a
 * StructStore are) — TODO confirm for other callers.
 *
 * @param {Array<GC | Item>} structs
 * @param {boolean} filterDeleted if true, deleted structs are excluded (a deleted struct ends the current range)
 * @return {Array<IdRange>}
 */
export const _createInsertSliceFromStructs = (structs, filterDeleted) => {
  /**
   * @type {Array<IdRange>}
   */
  const iditems = []
  for (let i = 0; i < structs.length; i++) {
    const struct = structs[i]
    if (!(filterDeleted && struct.deleted)) {
      const clock = struct.id.clock
      let len = struct.length
      if (i + 1 < structs.length) {
        // Greedily absorb the following structs into the current range until a
        // deleted struct is hit (only relevant when filterDeleted) or the list ends.
        // The outer `i` is advanced here so absorbed structs are not revisited.
        // eslint-disable-next-line
        for (let next = structs[i + 1]; i + 1 < structs.length && !(filterDeleted && next.deleted); next = structs[++i + 1]) {
          len += next.length
        }
      }
      iditems.push(new IdRange(clock, len))
    }
  }
  return iditems
}
/**
* @param {import('../internals.js').StructStore} ss
* @param {boolean} filterDeleted
*/
export const createInsertionSetFromStructStore = (ss, filterDeleted) => {
export const createInsertSetFromStructStore = (ss, filterDeleted) => {
const idset = createIdSet()
ss.clients.forEach((structs, client) => {
/**
* @type {Array<IdRange>}
*/
const iditems = []
for (let i = 0; i < structs.length; i++) {
const struct = structs[i]
if (!(filterDeleted && struct.deleted)) {
const clock = struct.id.clock
let len = struct.length
if (i + 1 < structs.length) {
// eslint-disable-next-line
for (let next = structs[i + 1]; i + 1 < structs.length && !(filterDeleted && next.deleted); next = structs[++i + 1]) {
len += next.length
}
}
iditems.push(new IdRange(clock, len))
}
}
if (iditems.length > 0) {
const iditems = _createInsertSliceFromStructs(structs, filterDeleted)
if (iditems.length !== 0) {
idset.clients.set(client, new IdRanges(iditems))
}
})

View File

@@ -9,8 +9,7 @@ import {
ContentType,
followRedone,
getItem,
StructStore, ID, Doc, AbstractType,
noAttributionsManager, // eslint-disable-line
StructStore, ID, Doc, AbstractType, noAttributionsManager, // eslint-disable-line
} from '../internals.js'
import * as encoding from 'lib0/encoding'

136
src/utils/StructSet.js Normal file
View File

@@ -0,0 +1,136 @@
import {
createID,
readItemContent,
findIndexCleanStart,
Skip,
UpdateDecoderV1, UpdateDecoderV2, IdSet, Doc, GC, Item, ID, // eslint-disable-line
} from '../internals.js'
import * as decoding from 'lib0/decoding'
import * as binary from 'lib0/binary'
import * as map from 'lib0/map'
/**
 * Read the structs of an encoded update and group them by client into a
 * StructSet (one StructRange per client).
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
 * @param {Doc} doc used to resolve root-type parents by name (doc.get)
 * @return {StructSet}
 *
 * @private
 * @function
 */
export const readStructSet = (decoder, doc) => {
  const clientRefs = new StructSet()
  // number of clients that contributed structs to this update
  const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numOfStateUpdates; i++) {
    const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
    /**
     * @type {Array<GC|Item>}
     */
    const refs = new Array(numberOfStructs)
    const client = decoder.readClient()
    // clock of the first struct; incremented as structs are read
    let clock = decoding.readVarUint(decoder.restDecoder)
    clientRefs.clients.set(client, new StructRange(refs))
    for (let i = 0; i < numberOfStructs; i++) {
      // lower 5 bits of info select the struct type, upper bits carry Item flags
      const info = decoder.readInfo()
      switch (binary.BITS5 & info) {
        case 0: { // GC
          const len = decoder.readLen()
          refs[i] = new GC(createID(client, clock), len)
          clock += len
          break
        }
        case 10: { // Skip Struct (nothing to apply)
          // @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
          const len = decoding.readVarUint(decoder.restDecoder)
          refs[i] = new Skip(createID(client, clock), len)
          clock += len
          break
        }
        default: { // Item with content
          /**
           * The optimized implementation doesn't use any variables because inlining variables is faster.
           * Below a non-optimized version is shown that implements the basic algorithm with
           * a few comments
           */
          const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
          // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
          // and we read the next string as parentYKey.
          // It indicates how we store/retrieve parent from `y.share`
          // @type {string|null}
          const struct = new Item(
            createID(client, clock),
            null, // left
            (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
            null, // right
            (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
            cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent
            cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
            readItemContent(decoder, info) // item content
          )
          refs[i] = struct
          clock += struct.length
        }
      }
    }
  }
  return clientRefs
}
/**
 * Remove item-ranges from the StructSet.
 *
 * Every run of structs covered by a range in `exclude` is replaced by a single
 * Skip struct, so each client's struct list stays clock-contiguous.
 *
 * @param {StructSet} ss
 * @param {IdSet} exclude
 */
export const removeRangesFromStructSet = (ss, exclude) => {
  exclude.clients.forEach((range, client) => {
    const structs = /** @type {StructRange} */ (ss.clients.get(client))?.refs
    if (structs != null) {
      const firstStruct = structs[0]
      const lastStruct = structs[structs.length - 1]
      const idranges = range.getIds()
      for (let i = 0; i < idranges.length; i++) {
        const idrange = idranges[i] // renamed from `range` to avoid shadowing the forEach parameter
        // skip ranges that don't overlap the structs we have for this client
        if (idrange.clock >= lastStruct.id.clock + lastStruct.length) continue
        if (idrange.clock + idrange.len <= firstStruct.id.clock) continue
        let startIndex = 0
        let endIndex = structs.length
        if (idrange.clock > firstStruct.id.clock) {
          // split so the excluded range starts exactly on a struct boundary
          startIndex = findIndexCleanStart(null, structs, idrange.clock)
        }
        if (idrange.clock + idrange.len < lastStruct.id.clock + lastStruct.length) {
          // split so the excluded range ends exactly on a struct boundary
          endIndex = findIndexCleanStart(null, structs, idrange.clock + idrange.len)
        }
        if (startIndex < endIndex) {
          // replace the covered structs [startIndex, endIndex) with a single Skip
          structs[startIndex] = new Skip(new ID(client, idrange.clock), idrange.len)
          const d = endIndex - startIndex
          if (d > 1) {
            // bugfix: splice must start AFTER the Skip that was just written.
            // The previous `structs.splice(startIndex, d)` removed the Skip itself,
            // leaving a clock gap in the struct list.
            structs.splice(startIndex + 1, d - 1)
          }
        }
      }
    }
  })
}
/**
 * Per-client cursor over a list of decoded structs: `refs` holds the structs,
 * `i` is the index of the next struct that still has to be consumed/integrated.
 */
class StructRange {
  /**
   * @param {Array<Item|GC>} refs decoded structs of a single client
   */
  constructor (refs) {
    /**
     * @type {Array<Item | GC>}
     */
    this.refs = refs
    // cursor starts at the first struct
    this.i = 0
  }
}
/**
 * Groups decoded structs by client id — the in-memory representation of an
 * update's structs before they are integrated (see readStructSet).
 */
export class StructSet {
  constructor () {
    /**
     * Maps a client id to the StructRange (cursor + struct list) of that client.
     *
     * @type {Map<number, StructRange>}
     */
    this.clients = map.create()
  }
}

View File

@@ -1,8 +1,9 @@
import {
GC,
splitItem,
Transaction, ID, Item, // eslint-disable-line
createDeleteSetFromStructStore
createDeleteSetFromStructStore,
createIdSet,
Transaction, ID, Item // eslint-disable-line
} from '../internals.js'
import * as math from 'lib0/math'
@@ -23,6 +24,7 @@ export class StructStore {
* @type {null | Uint8Array}
*/
this.pendingDs = null
this.skips = createIdSet()
}
get ds () {
@@ -46,6 +48,9 @@ export const getStateVector = store => {
const struct = structs[structs.length - 1]
sm.set(client, struct.id.clock + struct.length)
})
store.skips.clients.forEach((range, client) => {
sm.set(client, range.getIds()[0].clock)
})
return sm
}
@@ -171,7 +176,7 @@ export const find = (store, id) => {
export const getItem = /** @type {function(StructStore,ID):Item} */ (find)
/**
* @param {Transaction} transaction
* @param {Transaction?} transaction
* @param {Array<Item|GC>} structs
* @param {number} clock
*/

View File

@@ -17,12 +17,10 @@
import {
findIndexSS,
getState,
createID,
getStateVector,
readAndApplyDeleteSet,
writeIdSet,
transact,
readItemContent,
UpdateDecoderV1,
UpdateDecoderV2,
UpdateEncoderV1,
@@ -35,12 +33,14 @@ import {
Skip,
diffUpdateV2,
convertUpdateFormatV2ToV1,
IdSet, DSDecoderV2, Doc, Transaction, GC, Item, StructStore, createDeleteSetFromStructStore, // eslint-disable-line
readStructSet,
removeRangesFromStructSet,
createIdSet,
StructSet, IdSet, DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line
} from '../internals.js'
import * as encoding from 'lib0/encoding'
import * as decoding from 'lib0/decoding'
import * as binary from 'lib0/binary'
import * as map from 'lib0/map'
import * as math from 'lib0/math'
import * as array from 'lib0/array'
@@ -120,102 +120,6 @@ export const writeStructsFromIdSet = (encoder, store, idset) => {
})
}
/**
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
* @param {Doc} doc
* @return {Map<number, { i: number, refs: Array<Item | GC> }>}
*
* @private
* @function
*/
export const readClientsStructRefs = (decoder, doc) => {
/**
* @type {Map<number, { i: number, refs: Array<Item | GC> }>}
*/
const clientRefs = map.create()
const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
for (let i = 0; i < numOfStateUpdates; i++) {
const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
/**
* @type {Array<GC|Item>}
*/
const refs = new Array(numberOfStructs)
const client = decoder.readClient()
let clock = decoding.readVarUint(decoder.restDecoder)
// const start = performance.now()
clientRefs.set(client, { i: 0, refs })
for (let i = 0; i < numberOfStructs; i++) {
const info = decoder.readInfo()
switch (binary.BITS5 & info) {
case 0: { // GC
const len = decoder.readLen()
refs[i] = new GC(createID(client, clock), len)
clock += len
break
}
case 10: { // Skip Struct (nothing to apply)
// @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
const len = decoding.readVarUint(decoder.restDecoder)
refs[i] = new Skip(createID(client, clock), len)
clock += len
break
}
default: { // Item with content
/**
* The optimized implementation doesn't use any variables because inlining variables is faster.
* Below a non-optimized version is shown that implements the basic algorithm with
* a few comments
*/
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
// and we read the next string as parentYKey.
// It indicates how we store/retrieve parent from `y.share`
// @type {string|null}
const struct = new Item(
createID(client, clock),
null, // left
(info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
null, // right
(info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
readItemContent(decoder, info) // item content
)
/* A non-optimized implementation of the above algorithm:
// The item that was originally to the left of this item.
const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null
// The item that was originally to the right of this item.
const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
// and we read the next string as parentYKey.
// It indicates how we store/retrieve parent from `y.share`
// @type {string|null}
const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null
const struct = new Item(
createID(client, clock),
null, // left
origin, // origin
null, // right
rightOrigin, // right origin
cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
readItemContent(decoder, info) // item content
)
*/
refs[i] = struct
clock += struct.length
}
}
}
// console.log('time to read: ', performance.now() - start) // @todo remove
}
return clientRefs
}
/**
* Resume computing structs generated by struct readers.
*
@@ -237,7 +141,7 @@ export const readClientsStructRefs = (decoder, doc) => {
*
* @param {Transaction} transaction
* @param {StructStore} store
* @param {Map<number, { i: number, refs: (GC | Item)[] }>} clientsStructRefs
* @param {StructSet} clientsStructRefs
* @return { null | { update: Uint8Array, missing: Map<number,number> } }
*
* @private
@@ -249,7 +153,7 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
*/
const stack = []
// sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
let clientsStructRefsIds = array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
let clientsStructRefsIds = array.from(clientsStructRefs.clients.keys()).sort((a, b) => a - b)
if (clientsStructRefsIds.length === 0) {
return null
}
@@ -257,11 +161,11 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
if (clientsStructRefsIds.length === 0) {
return null
}
let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.clients.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
while (nextStructsTarget.refs.length === nextStructsTarget.i) {
clientsStructRefsIds.pop()
if (clientsStructRefsIds.length > 0) {
nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.clients.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
} else {
return null
}
@@ -295,15 +199,21 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
// caching the state because it is used very often
const state = new Map()
// // caching the state because it is used very often
// const currentInsertSet = createIdSet()
// clientsStructRefsIds.forEach(clientId => {
// currentInsertSet.clients.set(clientid, new IdRanges(_createInsertSliceFromStructs(store.clients.get(clientId) ?? [], false)))
// })
const addStackToRestSS = () => {
for (const item of stack) {
const client = item.id.client
const inapplicableItems = clientsStructRefs.get(client)
const inapplicableItems = clientsStructRefs.clients.get(client)
if (inapplicableItems) {
// decrement because we weren't able to apply previous operation
inapplicableItems.i--
restStructs.clients.set(client, inapplicableItems.refs.slice(inapplicableItems.i))
clientsStructRefs.delete(client)
clientsStructRefs.clients.delete(client)
inapplicableItems.i = 0
inapplicableItems.refs = []
} else {
@@ -335,7 +245,7 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
/**
* @type {{ refs: Array<GC|Item>, i: number }}
*/
const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 }
const structRefs = clientsStructRefs.clients.get(/** @type {number} */ (missing)) || { refs: [], i: 0 }
if (structRefs.refs.length === structRefs.i) {
// This update message causally depends on another update message that doesn't exist yet
updateMissingSv(/** @type {number} */ (missing), getState(store, missing))
@@ -346,7 +256,7 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
}
} else if (offset === 0 || offset < stackHead.length) {
// all fine, apply the stackhead
stackHead.integrate(transaction, offset)
stackHead.integrate(transaction, offset) // since I'm splitting structs before integrating them, offset is no longer necessary
state.set(stackHead.id.client, stackHead.id.clock + stackHead.length)
}
}
@@ -406,7 +316,20 @@ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = n
const doc = transaction.doc
const store = doc.store
// let start = performance.now()
const ss = readClientsStructRefs(structDecoder, doc)
const ss = readStructSet(structDecoder, doc)
const knownState = createIdSet()
ss.clients.forEach((_, client) => {
const storeStructs = store.clients.get(client)
if (storeStructs) {
knownState.add(client, 0, storeStructs.length)
// remove known items from ss
store.skips.clients.get(client)?.getIds().forEach(idrange => {
knownState.delete(client, idrange.clock, idrange.len)
})
}
})
// remove known items from ss
removeRangesFromStructSet(ss, knownState)
// console.log('time to read structs: ', performance.now() - start) // @todo remove
// start = performance.now()
// console.log('time to merge: ', performance.now() - start) // @todo remove

View File

@@ -18,7 +18,7 @@ export const testRelativePositions = _tc => {
ytext.insert(0, 'hello world')
const v1 = Y.cloneDoc(ydoc)
ytext.delete(1, 6)
ytext.insert(1, 'x', )
ytext.insert(1, 'x')
const am = Y.createAttributionManagerFromDiff(v1, ydoc)
const rel = Y.createRelativePositionFromTypeIndex(ytext, 9, 1, am) // pos after "hello wo"
const abs1 = Y.createAbsolutePositionFromRelativePosition(rel, ydoc, true, am)

View File

@@ -1,7 +1,7 @@
import * as t from 'lib0/testing'
import * as Y from '../src/index.js'
import { init, compare } from './testHelper.js' // eslint-disable-line
import { readClientsStructRefs, readIdSet, UpdateDecoderV2, UpdateEncoderV2, writeIdSet } from '../src/internals.js'
import { readStructSet, readIdSet, UpdateDecoderV2, UpdateEncoderV2, writeIdSet } from '../src/internals.js'
import * as encoding from 'lib0/encoding'
import * as decoding from 'lib0/decoding'
import * as object from 'lib0/object'
@@ -192,7 +192,7 @@ const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
// So we add all deletes from `diffed` to `partDeletes` and compare then
const decoder = decoding.createDecoder(diffed)
const updateDecoder = new UpdateDecoderV2(decoder)
readClientsStructRefs(updateDecoder, new Y.Doc())
readStructSet(updateDecoder, new Y.Doc())
const ds = readIdSet(updateDecoder)
const updateEncoder = new UpdateEncoderV2()
encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs

View File

@@ -364,14 +364,14 @@ export const testElementAttributedContentViaDiffer = _tc => {
t.group('test getContentDeep both docs synced', () => {
t.info('expecting diffingAttributionManager to auto update itself')
const expectedContent = delta.createArrayDelta().insert([{ nodeName: 'span', children: delta.createArrayDelta(), attributes: {} }]).insert([
delta.createTextDelta().insert('bigworld', null, { acceptInsert: ['unknown'] })
], null, { acceptInsert: ['unknown'] })
delta.createTextDelta().insert('bigworld')
])
const attributedContent = yelement.getContentDeep(attributionManager)
console.log('children', JSON.stringify(attributedContent.children.toJSON(), null, 2))
console.log('cs expec', JSON.stringify(expectedContent.toJSON(), null, 2))
console.log('attributes', attributedContent.attributes)
t.assert(attributedContent.children.equals(expectedContent))
t.compare(attributedContent.attributes, { key: { prevValue: undefined, value: '42', attribution: { acceptInsert: ['unknown'] } } })
t.compare(attributedContent.attributes, { key: { prevValue: undefined, value: '42', attribution: null } })
t.assert(attributedContent.nodeName === 'UNDEFINED')
})
}