2019-04-04 19:35:38 +02:00
|
|
|
|
2019-04-11 13:18:35 +02:00
|
|
|
/**
|
|
|
|
|
* @module encoding
|
2019-05-28 14:18:20 +02:00
|
|
|
*
|
|
|
|
|
* We use the first five bits in the info flag for determining the type of the struct.
|
|
|
|
|
*
|
|
|
|
|
* 0: GC
|
|
|
|
|
* 1: Item with Deleted content
|
|
|
|
|
* 2: Item with JSON content
|
|
|
|
|
* 3: Item with Binary content
|
|
|
|
|
* 4: Item with String content
|
|
|
|
|
* 5: Item with Embed content (for richtext content)
|
|
|
|
|
* 6: Item with Format content (a formatting marker for richtext content)
|
|
|
|
|
* 7: Item with Type
|
2019-04-11 13:18:35 +02:00
|
|
|
*/
|
|
|
|
|
|
2019-04-04 19:35:38 +02:00
|
|
|
import {
|
|
|
|
|
findIndexSS,
|
|
|
|
|
writeID,
|
|
|
|
|
readID,
|
2019-04-05 00:37:09 +02:00
|
|
|
getState,
|
2020-06-02 23:20:45 +02:00
|
|
|
createID,
|
2019-05-07 13:44:23 +02:00
|
|
|
getStateVector,
|
2019-09-03 16:33:29 +02:00
|
|
|
readAndApplyDeleteSet,
|
2019-04-07 23:08:08 +02:00
|
|
|
writeDeleteSet,
|
|
|
|
|
createDeleteSetFromStructStore,
|
2019-09-17 18:53:59 +02:00
|
|
|
transact,
|
2020-06-09 00:53:05 +02:00
|
|
|
readItem,
|
2020-06-02 23:20:45 +02:00
|
|
|
Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line
|
2019-04-04 19:35:38 +02:00
|
|
|
} from '../internals.js'
|
|
|
|
|
|
2019-03-26 01:14:15 +01:00
|
|
|
import * as encoding from 'lib0/encoding.js'
|
|
|
|
|
import * as decoding from 'lib0/decoding.js'
|
|
|
|
|
import * as binary from 'lib0/binary.js'
|
2019-04-02 23:08:58 +02:00
|
|
|
|
2019-03-26 01:14:15 +01:00
|
|
|
/**
|
2019-04-10 18:52:22 +02:00
|
|
|
* @param {encoding.Encoder} encoder
|
2020-06-02 23:20:45 +02:00
|
|
|
* @param {Array<GC|Item>} structs All structs by `client`
|
2019-04-10 18:52:22 +02:00
|
|
|
* @param {number} client
|
|
|
|
|
* @param {number} clock write structs starting with `ID(client,clock)`
|
2019-04-11 13:18:35 +02:00
|
|
|
*
|
|
|
|
|
* @function
|
2019-03-26 01:14:15 +01:00
|
|
|
*/
|
2019-04-10 18:52:22 +02:00
|
|
|
const writeStructs = (encoder, structs, client, clock) => {
|
|
|
|
|
// write first id
|
|
|
|
|
const startNewStructs = findIndexSS(structs, clock)
|
|
|
|
|
// write # encoded structs
|
|
|
|
|
encoding.writeVarUint(encoder, structs.length - startNewStructs)
|
|
|
|
|
writeID(encoder, createID(client, clock))
|
|
|
|
|
const firstStruct = structs[startNewStructs]
|
|
|
|
|
// write first struct with an offset
|
2020-06-02 23:20:45 +02:00
|
|
|
firstStruct.write(encoder, clock - firstStruct.id.clock)
|
2019-04-10 18:52:22 +02:00
|
|
|
for (let i = startNewStructs + 1; i < structs.length; i++) {
|
2020-06-02 23:20:45 +02:00
|
|
|
structs[i].write(encoder, 0)
|
2019-04-10 18:52:22 +02:00
|
|
|
}
|
|
|
|
|
}
|
2019-04-02 23:08:58 +02:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* @param {encoding.Encoder} encoder
|
|
|
|
|
* @param {StructStore} store
|
2019-04-11 13:18:35 +02:00
|
|
|
* @param {Map<number,number>} _sm
|
|
|
|
|
*
|
|
|
|
|
* @private
|
|
|
|
|
* @function
|
2019-04-02 23:08:58 +02:00
|
|
|
*/
|
2019-04-10 18:52:22 +02:00
|
|
|
export const writeClientsStructs = (encoder, store, _sm) => {
|
2019-04-05 00:37:09 +02:00
|
|
|
// we filter all valid _sm entries into sm
|
|
|
|
|
const sm = new Map()
|
|
|
|
|
_sm.forEach((clock, client) => {
|
2019-04-07 23:08:08 +02:00
|
|
|
// only write if new structs are available
|
2019-04-05 00:37:09 +02:00
|
|
|
if (getState(store, client) > clock) {
|
|
|
|
|
sm.set(client, clock)
|
|
|
|
|
}
|
|
|
|
|
})
|
2019-05-07 13:44:23 +02:00
|
|
|
getStateVector(store).forEach((clock, client) => {
|
2019-04-05 00:37:09 +02:00
|
|
|
if (!_sm.has(client)) {
|
|
|
|
|
sm.set(client, 0)
|
|
|
|
|
}
|
|
|
|
|
})
|
2019-04-02 23:08:58 +02:00
|
|
|
// write # states that were updated
|
|
|
|
|
encoding.writeVarUint(encoder, sm.size)
|
2020-06-09 00:53:05 +02:00
|
|
|
// Write items with higher client ids first
|
|
|
|
|
// This heavily improves the conflict algorithm.
|
|
|
|
|
Array.from(sm.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
2019-03-29 01:02:44 +01:00
|
|
|
// @ts-ignore
|
2019-04-10 18:52:22 +02:00
|
|
|
writeStructs(encoder, store.clients.get(client), client, clock)
|
2019-03-29 01:02:44 +01:00
|
|
|
})
|
|
|
|
|
}
|
2019-04-02 23:08:58 +02:00
|
|
|
|
|
|
|
|
/**
 * Read all client struct references from the decoder into `clientRefs`.
 *
 * @param {decoding.Decoder} decoder The decoder object to read data from.
 * @param {Map<number,Array<GC|Item>>} clientRefs map that is filled with the read structs, keyed by client
 * @param {Doc} doc
 * @return {Map<number,Array<GC|Item>>} the filled `clientRefs` map
 *
 * @private
 * @function
 */
export const readClientsStructRefs = (decoder, clientRefs, doc) => {
  const numOfStateUpdates = decoding.readVarUint(decoder)
  for (let i = 0; i < numOfStateUpdates; i++) {
    const numberOfStructs = decoding.readVarUint(decoder)
    const nextID = readID(decoder)
    const nextIdClient = nextID.client
    // clock advances as structs are read; each struct starts where the previous ended
    let nextIdClock = nextID.clock
    /**
     * @type {Array<GC|Item>}
     */
    const refs = []
    clientRefs.set(nextIdClient, refs)
    // use `j` here — the original shadowed the outer loop index `i`
    for (let j = 0; j < numberOfStructs; j++) {
      const info = decoding.readUint8(decoder)
      const id = createID(nextIdClient, nextIdClock)
      // the lowest five info bits encode the struct type; 0 marks a GC struct
      const struct = (binary.BITS5 & info) === 0 ? new GC(id, decoding.readVarUint(decoder)) : readItem(decoder, id, info, doc)
      refs.push(struct)
      nextIdClock += struct.length
    }
  }
  return clientRefs
}
|
|
|
|
|
|
|
|
|
|
/**
 * Resume computing structs generated by struct readers.
 *
 * While there is something to do, we integrate structs in this order
 * 1. top element on stack, if stack is not empty
 * 2. next element from current struct reader (if empty, use next struct reader)
 *
 * If struct causally depends on another struct (ref.missing), we put next reader of
 * `ref.id.client` on top of stack.
 *
 * At some point we find a struct that has no causal dependencies,
 * then we start emptying the stack.
 *
 * It is not possible to have circles: i.e. struct1 (from client1) depends on struct2 (from client2)
 * depends on struct3 (from client1). Therefore the max stack size is equal to `structReaders.length`.
 *
 * This method is implemented in a way so that we can resume computation if this update
 * causally depends on another update.
 *
 * @param {Transaction} transaction
 * @param {StructStore} store
 *
 * @private
 * @function
 */
const resumeStructIntegration = (transaction, store) => {
  const stack = store.pendingStack
  const pendingRefs = store.pendingClientsStructRefs
  // keep going while either the stack or any pending struct reader has work left
  while (stack.length !== 0 || pendingRefs.size !== 0) {
    if (stack.length === 0) {
      // seed the stack with the next struct of an arbitrary pending reader
      const [client, reader] = pendingRefs.entries().next().value
      stack.push(reader.refs[reader.i++])
      if (reader.refs.length === reader.i) {
        // reader exhausted — remove it
        pendingRefs.delete(client)
      }
    }
    const cur = stack[stack.length - 1]
    const curID = cur.id
    const client = curID.client
    const curClock = curID.clock
    const localClock = getState(store, client)
    // if the struct starts before our local state, part of it is already known
    const offset = curClock < localClock ? localClock - curClock : 0
    const missingDep = cur.getMissing(transaction, store)
    if (curClock + offset !== localClock) {
      // A previous message from this client is missing.
      // Check whether the pending reader of the same client holds a struct with a
      // smaller clock; if so, swap it with the current one and retry.
      const reader = pendingRefs.get(client)
      if (reader !== undefined) {
        const candidate = reader.refs[reader.i]
        if (candidate.id.clock < curClock) {
          // exchange the two structs; re-sort because the swap may break ordering
          reader.refs[reader.i] = cur
          stack[stack.length - 1] = candidate
          reader.refs = reader.refs.slice(reader.i).sort((a, b) => a.id.clock - b.id.clock)
          reader.i = 0
          continue
        }
      }
      // the missing struct is not part of this update — wait until it is available
      return
    }
    if (missingDep) {
      // this struct causally depends on a struct of another client
      const depClient = missingDep.client
      // get the struct reader that has the missing struct
      const reader = pendingRefs.get(depClient)
      if (reader === undefined) {
        // This update message causally depends on another update message.
        return
      }
      stack.push(reader.refs[reader.i++])
      if (reader.i === reader.refs.length) {
        pendingRefs.delete(depClient)
      }
    } else {
      // no missing dependencies — integrate, skipping the already-known prefix
      if (offset < cur.length) {
        cur.integrate(transaction, offset)
      }
      stack.pop()
    }
  }
}
|
2019-04-07 23:08:08 +02:00
|
|
|
|
|
|
|
|
/**
 * Try to apply all delete sets that could not be applied yet.
 *
 * @param {Transaction} transaction
 * @param {StructStore} store
 *
 * @private
 * @function
 */
export const tryResumePendingDeleteReaders = (transaction, store) => {
  // detach the current list first — applying a delete set may queue new pending readers
  const pendingReaders = store.pendingDeleteReaders
  store.pendingDeleteReaders = []
  for (const reader of pendingReaders) {
    readAndApplyDeleteSet(reader, transaction, store)
  }
}
|
|
|
|
|
|
2019-04-10 18:52:22 +02:00
|
|
|
/**
 * Write the structs created during a transaction (everything after `beforeState`).
 *
 * @param {encoding.Encoder} encoder
 * @param {Transaction} transaction
 *
 * @private
 * @function
 */
export const writeStructsFromTransaction = (encoder, transaction) => {
  writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState)
}
|
2019-04-10 18:52:22 +02:00
|
|
|
|
|
|
|
|
/**
 * Merge freshly read struct references into the store's pending reads.
 *
 * @param {StructStore} store
 * @param {Map<number, Array<GC|Item>>} clientsStructsRefs
 *
 * @private
 * @function
 */
const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
  const pending = store.pendingClientsStructRefs
  clientsStructsRefs.forEach((structRefs, client) => {
    const existing = pending.get(client)
    if (existing === undefined) {
      // nothing pending for this client — register the new refs directly
      pending.set(client, { refs: structRefs, i: 0 })
    } else {
      // merge into the existing entry; drop already-consumed refs first
      const merged = existing.i > 0 ? existing.refs.slice(existing.i) : existing.refs
      for (const ref of structRefs) {
        merged.push(ref)
      }
      existing.i = 0
      // keep refs ordered by clock so integration consumes them in order
      existing.refs = merged.sort((a, b) => a.id.clock - b.id.clock)
    }
  })
}
|
|
|
|
|
|
2019-04-07 23:08:08 +02:00
|
|
|
/**
 * Read the next Item in a Decoder and fill this Item with the read data.
 *
 * This is called when data is received from a remote peer.
 *
 * @param {decoding.Decoder} decoder The decoder object to read data from.
 * @param {Transaction} transaction
 * @param {StructStore} store
 *
 * @private
 * @function
 */
export const readStructs = (decoder, transaction, store) => {
  // decode all struct references of this update
  const refs = new Map()
  readClientsStructRefs(decoder, refs, transaction.doc)
  // queue them and integrate as much as causality allows
  mergeReadStructsIntoPendingReads(store, refs)
  resumeStructIntegration(transaction, store)
  tryResumePendingDeleteReaders(transaction, store)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Read and apply a document update.
 *
 * This function has the same effect as `applyUpdate` but accepts a decoder.
 *
 * @param {decoding.Decoder} decoder
 * @param {Doc} ydoc
 * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
 *
 * @function
 */
export const readUpdate = (decoder, ydoc, transactionOrigin) => {
  return transact(ydoc, transaction => {
    // structs must be read & integrated before the delete set
    readStructs(decoder, transaction, ydoc.store)
    readAndApplyDeleteSet(decoder, transaction, ydoc.store)
  }, transactionOrigin, false)
}
|
2019-05-07 13:44:23 +02:00
|
|
|
|
|
|
|
|
/**
 * Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
 *
 * This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
 *
 * @param {Doc} ydoc
 * @param {Uint8Array} update
 * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
 *
 * @function
 */
export const applyUpdate = (ydoc, update, transactionOrigin) => {
  const decoder = decoding.createDecoder(update)
  return readUpdate(decoder, ydoc, transactionOrigin)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
 * only write the operations that are missing.
 *
 * @param {encoding.Encoder} encoder
 * @param {Doc} doc
 * @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
 *
 * @function
 */
export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) => {
  // structs first, then the delete set — `readUpdate` consumes them in this order
  writeClientsStructs(encoder, doc.store, targetStateVector)
  const ds = createDeleteSetFromStructStore(doc.store)
  writeDeleteSet(encoder, ds)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Write all the document as a single update message that can be applied on the remote document. If you specify the state of the remote client (`targetState`) it will
 * only write the operations that are missing.
 *
 * Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
 *
 * @param {Doc} doc
 * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
 * @return {Uint8Array}
 *
 * @function
 */
export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => {
  const encoder = encoding.createEncoder()
  // without a target state vector, encode everything (empty map = all clocks at 0)
  let targetStateVector = new Map()
  if (encodedTargetStateVector != null) {
    targetStateVector = decodeStateVector(encodedTargetStateVector)
  }
  writeStateAsUpdate(encoder, doc, targetStateVector)
  return encoding.toUint8Array(encoder)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Read state vector from Decoder and return as Map
 *
 * @param {decoding.Decoder} decoder
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
export const readStateVector = decoder => {
  const sv = new Map()
  let remaining = decoding.readVarUint(decoder)
  while (remaining-- > 0) {
    // each entry is encoded as client id followed by its next expected clock
    const client = decoding.readVarUint(decoder)
    sv.set(client, decoding.readVarUint(decoder))
  }
  return sv
}
|
|
|
|
|
|
|
|
|
|
/**
 * Read decodedState and return State as Map.
 *
 * @param {Uint8Array} decodedState
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
export const decodeStateVector = decodedState => {
  const decoder = decoding.createDecoder(decodedState)
  return readStateVector(decoder)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Write State Vector to `lib0/encoding.js#Encoder`.
 *
 * @param {encoding.Encoder} encoder
 * @param {Map<number,number>} sv
 * @return {encoding.Encoder} the same encoder, for chaining
 * @function
 */
export const writeStateVector = (encoder, sv) => {
  // entry count first, then one (client, clock) pair per entry
  encoding.writeVarUint(encoder, sv.size)
  for (const [client, clock] of sv) {
    encoding.writeVarUint(encoder, client)
    encoding.writeVarUint(encoder, clock)
  }
  return encoder
}
|
|
|
|
|
|
2019-09-03 16:33:29 +02:00
|
|
|
/**
 * Write the state vector of a document to `lib0/encoding.js#Encoder`.
 *
 * @param {encoding.Encoder} encoder
 * @param {Doc} doc
 * @return {encoding.Encoder} the same encoder, for chaining
 *
 * @function
 */
export const writeDocumentStateVector = (encoder, doc) => {
  return writeStateVector(encoder, getStateVector(doc.store))
}
|
|
|
|
|
|
2019-05-07 13:44:23 +02:00
|
|
|
/**
 * Encode State as Uint8Array.
 *
 * @param {Doc} doc
 * @return {Uint8Array}
 *
 * @function
 */
export const encodeStateVector = doc => {
  // writeDocumentStateVector returns the encoder it wrote to
  const encoder = writeDocumentStateVector(encoding.createEncoder(), doc)
  return encoding.toUint8Array(encoder)
}
|