Full Code of braid-org/braidjs for AI

master 52803d16617e cached
102 files
895.6 KB
232.5k tokens
354 symbols
1 requests
Download .txt
Showing preview only (935K chars total). Download the full file or copy to clipboard to get everything.
Repository: braid-org/braidjs
Branch: master
Commit: 52803d16617e
Files: 102
Total size: 895.6 KB

Directory structure:
gitextract_8cg5elin/

├── .gitignore
├── antimatter/
│   ├── antimatter.js
│   ├── doc.html
│   ├── package.json
│   ├── readme.md
│   └── test.html
├── antimatter_ts/
│   ├── antimatter.js
│   ├── doc.html
│   ├── package.json
│   ├── random002.js
│   ├── readme.md
│   ├── src/
│   │   ├── antimatter_crdt.ts
│   │   ├── json_crdt.ts
│   │   └── sequence_crdt.ts
│   ├── test.html
│   └── tsconfig.json
├── antimatter_wiki/
│   ├── client.html
│   ├── package.json
│   ├── readme.md
│   └── server.js
├── braid-http/
│   ├── braid-http-client.js
│   ├── braid-http-server.js
│   ├── contributing.md
│   ├── demos/
│   │   ├── blog/
│   │   │   ├── README
│   │   │   ├── certificate
│   │   │   ├── client.html
│   │   │   ├── package.json
│   │   │   ├── private-key
│   │   │   └── server.js
│   │   └── chat/
│   │       ├── README
│   │       ├── certificate
│   │       ├── client.html
│   │       ├── package.json
│   │       ├── private-key
│   │       └── server.js
│   ├── index.js
│   ├── index.mjs
│   ├── package.json
│   ├── package.md
│   ├── readme.md
│   └── test/
│       ├── client.html
│       ├── readme.md
│       ├── server.js
│       ├── test-request.txt
│       └── test-responses.txt
├── json-patch/
│   ├── apply-patch.js
│   ├── package.json
│   ├── readme.md
│   └── test.js
├── kernel/
│   ├── antimatter.js
│   ├── demos/
│   │   ├── simple/
│   │   │   ├── simple-client.html
│   │   │   └── simple-server.js
│   │   ├── sync9-chat/
│   │   │   ├── chat-server.js
│   │   │   ├── chat.css
│   │   │   ├── chat.html
│   │   │   ├── chat.js
│   │   │   ├── client.js
│   │   │   ├── mobile.css
│   │   │   ├── package.json
│   │   │   ├── settings.css
│   │   │   ├── settings.html
│   │   │   └── worker.js
│   │   └── wiki/
│   │       ├── wiki-client.html
│   │       └── wiki-server.js
│   ├── errors.js
│   ├── http-client.js
│   ├── http-server.js
│   ├── leadertab-shell.js
│   ├── llww.js
│   ├── node.js
│   ├── package.json
│   ├── pipe.js
│   ├── readme.md
│   ├── sqlite-store.js
│   ├── store.js
│   ├── test/
│   │   ├── tests.js
│   │   ├── virtual-p2p.js
│   │   ├── websocket-test.js
│   │   ├── wiki-perf.html
│   │   └── wiki-tester.js
│   ├── websocket-client.js
│   └── websocket-server.js
├── readme.md
├── simple_d_ton/
│   ├── index.js
│   └── package.json
├── simpleton/
│   ├── client.js
│   ├── demo.js
│   ├── index.js
│   ├── index.mjs
│   ├── package.json
│   └── server.js
├── sync9/
│   ├── old-vis/
│   │   ├── visualization.html
│   │   └── visualization.js
│   └── sync9.js
├── util/
│   ├── apply-patch.js
│   ├── braid-bundler.js
│   ├── diff.js
│   ├── require.js
│   └── utilities.js
└── yarnball/
    ├── server.js
    ├── yarnball.html
    └── yarnball.js

================================================
FILE CONTENTS
================================================

================================================
FILE: .gitignore
================================================
# Any certificates
certs/
certificates/
*.pem

# Database stuff
db.sqlite*

# Builds
braid-bundle.js
builds/

# VS Code
.vscode/

# Basic Nodejs Gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# Mike isn't into package-lock, but feel free to disagree with him
package-lock.json

# antimatter wiki db files
antimatter_wiki_db/
antimatter_wiki_db.*
antimatter_wiki.*
april-db-backup
jan-db-backup-2025
db
server.sh

# apple
.DS_Store


================================================
FILE: antimatter/antimatter.js
================================================
/// # Software Architecture
/// The software is architected into three objects:
///
/// ``` js
/// var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') 
/// ```

// v522

// The three module-level bindings below are populated by the IIFE that follows.

/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.
var create_antimatter_crdt;

/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable
///   JSON CRDT — "JSON" meaning it represents an arbitrary JSON data structure, and
///   "CRDT" and "pruneable" having the same meaning as for sequence_crdt below. The
///   json_crdt makes recursive use of sequence_crdt structures to represent
///   arbitrary JSON (for instance, a map is represented with a sequence_crdt
///   structure for each value, where the first element in the sequence is the
///   value).
var create_json_crdt;

/// - *sequence_crdt*: methods to manipulate a pruneable sequence CRDT —
///   "sequence" meaning it represents a javascript string or array, "CRDT" meaning
///   this structure can be merged with other ones, and "pruneable" meaning that it
///   supports an operation to remove meta-data when it is no longer needed (whereas
///   CRDT's often keep track of this meta-data forever).
var sequence_crdt = {};

(() => {
  /// # create_antimatter_crdt(send[, init])
  ///
  /// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).
  ///
  /// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a
  ///   message over a connection registered with `subscribe`. The sole
  ///   parameter to this function is a JSONafiable object that hopes to be passed to
  ///   the `receive` method on the antimatter_crdt object at the other end of the
  ///   connection specified in the `conn` key.
  /// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)
  /// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)
  /// * `clear_timeout`: function that takes a timeout identifier and cancels it (e.g. wrapping the javascript clearTimeout)
  /// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. 
  /// ``` js
  /// var antimatter_crdt = create_antimatter_crdt(msg => {
  ///     websockets[msg.conn].send(JSON.stringify(msg))
  ///   },
  ///   () => Date.now(),
  ///   (func, t) => setTimeout(func, t),
  ///   (t) => clearTimeout(t)),
  ///   JSON.parse(fs.readFileSync('./antimatter.backup'))
  /// )
  /// ```
  create_antimatter_crdt = (
    send,
    get_time,
    set_timeout,
    clear_timeout,
    self
  ) => {
    self = create_json_crdt(self);
    self.send = send;
    // purposely not:
    // self.id = self.id || Math.random().toString(36).slice(2);
    // to accomodate an id of numeric 0
    if (self.id === undefined) self.id = Math.random().toString(36).slice(2);
    self.next_seq = self.next_seq || 0;

    self.conns = self.conns || {};
    self.proto_conns = self.proto_conns || {};
    self.conn_count = self.conn_count || 0;

    self.fissures = self.fissures || {};
    self.acked_boundary = self.acked_boundary || {};
    self.ackmes = self.ackmes || {};
    self.forget_cbs = self.forget_cbs || {};

    self.version_groups = self.version_groups || {};

    self.ackme_map = self.ackme_map || {};
    self.ackme_time_est_1 = self.ackme_time_est_1 || 1000;
    self.ackme_time_est_2 = self.ackme_time_est_2 || 1000;
    self.ackme_current_wait_time = self.ackme_current_wait_time || 1000;
    self.ackme_increases_allowed = 1;
    self.ackme_timeout = self.ackme_timeout || null;

    // Merge the given version ids — together with any version groups they
    // already belong to — into a single sorted group, register that group in
    // self.version_groups for every member, and return it. The first element
    // of the returned (sorted) array serves as the group's canonical id.
    function raw_add_version_group(version_array) {
      let members = {};
      version_array.forEach((v) => {
        if (members[v]) return;
        members[v] = true;
        let existing_group = self.version_groups[v];
        if (existing_group) for (let vv of existing_group) members[vv] = true;
      });
      let group = Object.keys(members).sort();
      for (let v of group) self.version_groups[v] = group;
      return group;
    }

    // Scan the time DAG (self.T) for "collapsible" frontiers: sets of versions
    // that all share exactly the same parents, whose parents in turn have
    // exactly those versions as their children. Such parent/child set pairs
    // are candidates for bubble-collapsing during pruning.
    //
    // `children`: map from version id -> {child version id: true, ...}
    //   (presumably self.get_child_map() — the inverse of self.T).
    // Returns {parent_sets, child_sets}: each maps a version id to a shared
    // container object {members: {...}} for the set it belongs to; sets with
    // fewer than 2 members are not recorded.
    function get_parent_and_child_sets(children) {
      let parent_sets = {};
      let child_sets = {};
      let done = {};
      function add_set_to_sets(s, sets, mark_done) {
        // Share one container object across all members so identity can be
        // compared later; singleton sets are skipped.
        let container = { members: s };
        let array = Object.keys(s);
        if (array.length < 2) return;
        for (let v of array) {
          sets[v] = container;
          if (mark_done) done[v] = true;
        }
      }
      // The current frontier always counts as a parent set.
      add_set_to_sets(self.current_version, parent_sets, true);
      for (let v of Object.keys(self.T)) {
        if (done[v]) continue;
        done[v] = true;
        if (!children[v]) continue;
        let first_child_set = children[v];
        let first_child_array = Object.keys(first_child_set);
        let first_parent_set = self.T[first_child_array[0]];
        let first_parent_array = Object.keys(first_parent_set);
        if (
          // every child of v has exactly the same parent set...
          first_child_array.every((child) => {
            let parent_set = self.T[child];
            let parent_array = Object.keys(parent_set);
            return (
              parent_array.length == first_parent_array.length &&
              parent_array.every((parent) => first_parent_set[parent])
            );
          }) &&
          // ...and every one of those parents has exactly the same child set
          first_parent_array.every((parent) => {
            let child_set = children[parent];
            let child_array = Object.keys(child_set);
            return (
              child_array.length == first_child_array.length &&
              child_array.every((child) => first_child_set[child])
            );
          })
        ) {
          add_set_to_sets(first_parent_set, parent_sets, true);
          add_set_to_sets(first_child_set, child_sets);
        }
      }
      return { parent_sets, child_sets };
    }

    // Walk upward (toward ancestors) from the version set `bottom` looking
    // for a "bubble": a set of versions that dominates everything walked so
    // far, i.e. a point where the history between top and bottom can be
    // collapsed into a single version during pruning.
    //
    // * `bottom`: {version id: true, ...} — the lower boundary of the search.
    // * `children`: version id -> {child id: true} map (inverse of self.T).
    // * `child_sets`: as returned by get_parent_and_child_sets.
    // * `restricted`: optional {version id: true} set of versions we must not
    //   cross; when present, the search keeps going and returns the LAST top
    //   found before hitting a restricted/unknown version, instead of the
    //   first one.
    // Returns {version id: true, ...} for the bubble's top, or null.
    // Throws "bad" if an unknown version is reached while unrestricted.
    function find_one_bubble(bottom, children, child_sets, restricted) {
      let expecting = { ...bottom };
      let seen = {};
      // Pretend the immediate children of `bottom` are already visited, so the
      // upward walk can start at `bottom` itself.
      Object.keys(bottom).forEach(
        (v) =>
          children[v] &&
          Object.keys(children[v]).forEach((v) => (seen[v] = true))
      );
      let q = Object.keys(expecting);
      let last_top = null;
      while (q.length) {
        // BUG FIX: `cur` was assigned without declaration, creating an
        // implicit global (a ReferenceError in strict mode / ES modules).
        let cur = q.shift();
        if (!self.T[cur]) {
          if (!restricted) throw "bad";
          else return last_top;
        }
        if (restricted && restricted[cur]) return last_top;

        if (seen[cur]) continue;

        // Only visit a node once all of its children have been visited.
        if (children[cur] && !Object.keys(children[cur]).every((c) => seen[c]))
          continue;
        seen[cur] = true;
        delete expecting[cur];

        // Everything expected has been consumed: `cur` alone dominates.
        if (!Object.keys(expecting).length) {
          last_top = { [cur]: true };
          if (!restricted) return last_top;
        }

        Object.keys(self.T[cur]).forEach((p) => {
          expecting[p] = true;
          q.push(p);
        });

        // A fully-seen child set whose parents are exactly what we're still
        // expecting also forms a valid top.
        if (
          child_sets[cur] &&
          Object.keys(child_sets[cur].members).every((v) => seen[v])
        ) {
          let expecting_array = Object.keys(expecting);
          let parent_set = self.T[cur];
          let parent_array = Object.keys(parent_set);
          if (
            expecting_array.length == parent_array.length &&
            expecting_array.every((v) => parent_set[v])
          ) {
            last_top = child_sets[cur].members;
            if (!restricted) return last_top;
          }
        }
      }
      return last_top;
    }

    function add_version_group(version_array) {
      let version_group = raw_add_version_group(version_array);
      if (!version_array.some((x) => self.T[x])) return version_group[0];

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      let to_bubble = {};
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }

      let bottom = Object.fromEntries(
        version_group.filter((x) => self.T[x]).map((x) => [x, true])
      );
      let top = find_one_bubble(bottom, children, child_sets);
      let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];
      for (let v of Object.keys(top)) to_bubble[v] = bubble;
      for (let v of Object.keys(bottom)) mark_bubble(v, bubble);

      self.apply_bubbles(to_bubble);
      return version_group[0];
    }

    // Wrap the raw send callback so that outgoing messages are rewritten in
    // terms of version groups: a grouped `version` is expanded to its full
    // group array, and `parents` / `versions[i].parents` gain every member of
    // any group a listed parent belongs to. Copies are made (shallow for
    // `parents`, deep for `versions`) so the caller's objects aren't mutated.
    let orig_send = send;
    send = (x) => {
      if (self.version_groups[x.version])
        x.version = self.version_groups[x.version];
      if (x.parents) {
        x.parents = { ...x.parents };
        Object.keys(x.parents).forEach((v) =>
          self.version_groups[v] && self.version_groups[v].forEach((v) => (x.parents[v] = true))
        );
      }
      if (Array.isArray(x.versions)) {
        // Deep-copy before rewriting, since each entry's parents are mutated.
        x.versions = JSON.parse(JSON.stringify(x.versions));
        x.versions.forEach(
          (v) =>
            self.version_groups[v.version] &&
            (v.version = self.version_groups[v.version])
        );
        x.versions.forEach((v) => {
          Object.keys(v.parents).forEach((vv) =>
            self.version_groups[vv] && self.version_groups[vv].forEach((vv) => (v.parents[vv] = true))
          );
        });
      }

      orig_send(x);
    };

    /// # antimatter_crdt.receive(message)
    ///
    /// Let this antimatter object "receive" a message from another antimatter object, presumably from its `send` callback.
    /// ``` js
    /// websocket.on('message', data => {
    ///     antimatter_crdt.receive(JSON.parse(data)) });
    /// ```
    /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `type` entry. Note that each object also
    ///   contains a `conn` entry with the id of the connection the message is sent
    ///   over.
    self.receive = (x) => {
      let {
        type,
        version,
        parents,
        patches,
        versions,
        fissure,
        fissures,
        seen,
        forget,
        ackme,
        peer,
        conn,
      } = x;

      // Normalize incoming ids: an array-valued `version` denotes a version
      // group — collapse it to the group's canonical (first sorted) id,
      // registering the group if we don't already know its first member.
      if (version && typeof version != "string") {
        if (!self.T[version[0]]) version = add_version_group(version);
        else version = version[0];
      }
      // Drop parents that are non-canonical members of a known group.
      if (parents) {
        parents = { ...parents };
        Object.keys(parents).forEach((v) => {
          if (self.version_groups[v] && self.version_groups[v][0] != v)
            delete parents[v];
        });
      }

      // Same normalization for each entry of an array-valued `versions`.
      if (versions && versions.forEach) versions.forEach((v) => {
        if (typeof v.version != "string") {
          if (!self.T[v.version[0]]) v.version = add_version_group(v.version);
          else v.version = v.version[0];
        }
        v.parents = { ...v.parents };
        Object.keys(v.parents).forEach((vv) => {
          if (self.version_groups[vv] && self.version_groups[vv][0] != vv)
            delete v.parents[vv];
        });
      });

      // The set of versions an `ackme`/`update` is asking acknowledgement for.
      let ackme_versions_array = version
        ? [version]
        : versions && !Array.isArray(versions)
        ? Object.keys(versions).sort()
        : null;
      let ackme_versions =
        ackme_versions_array &&
        Object.fromEntries(ackme_versions_array.map((v) => [v, true]));

      if (versions && !Array.isArray(versions)) {
        versions = { ...versions };
        Object.keys(versions).forEach((v) => {
          if (self.version_groups[v] && self.version_groups[v][0] != v)
            delete versions[v];
        });
        if (!Object.keys(versions).length) return;
      }

      /// ## message `subscribe`
      /// `subscribe` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.
      /// ``` js
      /// { type: 'subscribe',
      ///   peer: 'SENDER_ID',
      ///   conn: 'CONN_ID',
      ///   parents: {'PARENT_VERSION_ID': true, ...} }
      /// ```
      /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.
      if (type == "subscribe" || (type == "welcome" && peer != null)) {
        if (self.conns[conn] != null) throw Error("bad");
        self.conns[conn] = { peer, seq: ++self.conn_count };
      }

      /// ## message `fissure`
      ///
      /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). It is also possible to send multiple `fissures` in an array.
      /// ``` js
      /// { type: 'fissure',
      ///   fissure: { // or fissures: [{...}, {...}, ...],
      ///     a: 'PEER_A_ID',
      ///     b:  'PEER_B_ID',
      ///     conn: 'CONN_ID',
      ///     versions: {'VERSION_ID': true, ...},
      ///     time: Date.now()
      ///   },
      ///   conn: 'CONN_ID' }
      /// ```
      /// Note that `time` isn't used for anything critical, as it's just wallclock time.
      if (fissure) fissures = [fissure];

      // Stamp each incoming fissure with our local logical time `t`.
      if (fissures) fissures = fissures.map((f) => {
        f = JSON.parse(JSON.stringify(f));
        f.t = self.conn_count;
        return f;
      });

      if (versions && (type == "update" || type == "welcome"))
        versions = Object.fromEntries(versions.map((v) => [v.version, v]));
      if (version) versions = { [version]: true };

      let rebased_patches = [];

      let fissures_back = [];
      let fissures_forward = [];
      let fissures_done = {};

      // Strip the local-only `t` stamp before forwarding fissures to peers.
      function copy_fissures(fs) {
        return fs.map((f) => {
          f = JSON.parse(JSON.stringify(f));
          delete f.t;
          return f;
        });
      }

      if (fissures) {
        let fiss_map = Object.fromEntries(
          fissures.map((f) => [f.a + ":" + f.b + ":" + f.conn, f])
        );
        for (let [key, f] of Object.entries(fiss_map)) {
          if (fissures_done[f.conn]) continue;
          fissures_done[f.conn] = true;

          let our_f = self.fissures[key];
          let other_key = f.b + ":" + f.a + ":" + f.conn;
          let their_other = fiss_map[other_key];
          let our_other = self.fissures[other_key];

          if (!our_f) self.fissures[key] = f;
          if (their_other && !our_other) self.fissures[other_key] = their_other;

          // If we're the `b` side of a fissure on a connection we no longer
          // have, synthesize the matching reverse fissure ourselves.
          if (!their_other && !our_other && f.b == self.id && !self.conns[f.conn]) {
            our_other = self.fissures[other_key] = {
              ...f,
              a: f.b,
              b: f.a,
              t: self.conn_count,
            };
          }

          if (!their_other && our_other) {
            fissures_back.push(f);
            fissures_back.push(our_other);
          }

          if (!our_f || (their_other && !our_other)) {
            fissures_forward.push(f);
            if (their_other || our_other)
              fissures_forward.push(their_other || our_other);
          }
        }
      }

      /// ## message `welcome`
      /// Sent in response to a `subscribe`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `subscribe` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.
      ///
      /// ``` js
      /// {
      ///   type: 'welcome',
      ///   versions: [
      ///     //each version looks like an update message...
      ///   ],
      ///   fissures: [
      ///     //each fissure looks as it would in a fissure message...
      ///   ],
      ///   parents: 
      ///     {
      ///       //versions you must have before consuming these new versions
      ///       'PARENT_VERSION_ID': true,
      ///       ...
      ///     },
      ///   [peer: 'SENDER_ID'], // if responding to a subscribe
      ///   conn: 'CONN_ID'
      /// } 
      /// ```
      let _T = {};
      let added_versions = [];
      if (type == "welcome") {
        var versions_to_add = {};
        let vs = Object.values(versions);
        vs.forEach((v) => (versions_to_add[v.version] = v.parents));
        // Versions we already have (or that are non-canonical group members)
        // don't need adding — nor do any of their ancestors.
        vs.forEach((v) => {
          if (
            self.T[v.version] ||
            (self.version_groups[v.version] &&
              self.version_groups[v.version][0] != v.version)
          ) {
            remove_ancestors(v.version);
            function remove_ancestors(v) {
              if (versions_to_add[v]) {
                Object.keys(versions_to_add[v]).forEach(remove_ancestors);
                delete versions_to_add[v];
              }
            }
          }
        });

        for (let v of vs) _T[v.version] = v.parents;

        l1: for (var v of vs) {
          if (versions_to_add[v.version]) {
            let ps = Object.keys(v.parents);

            // Skip versions whose parents we don't have yet (or root versions
            // when we already have history).
            if (!ps.length && Object.keys(self.T).length) continue;
            // BUG FIX: loop variable `p` was an undeclared implicit global.
            for (let p of ps) if (!self.T[p]) continue l1;

            rebased_patches = rebased_patches.concat(
              self.add_version(v.version, v.parents, v.patches, v.sort_keys)
            );

            added_versions.push(v);
            delete _T[v.version];
          }
        }
      }

      if (type == "subscribe" || (type == "welcome" && peer != null)) {
        let fissures_back = Object.values(self.fissures);

        if (type == "welcome") {
          // _T now holds only the versions we could NOT add; their leaves
          // must be protected by a synthetic fissure until they arrive.
          var leaves = { ..._T };
          Object.keys(_T).forEach((v) => {
            Object.keys(_T[v]).forEach((p) => delete leaves[p]);
          });

          let f = {
            a: self.id,
            b: peer,
            conn: "-" + conn,
            versions: Object.fromEntries(
              added_versions
                .concat(Object.keys(leaves).map((v) => versions[v]))
                .map((v) => [v.version, true])
            ),
            time: get_time(),
            t: self.conn_count,
          };
          if (Object.keys(f.versions).length) {
            let key = f.a + ":" + f.b + ":" + f.conn;
            self.fissures[key] = f;
            fissures_back.push(f);
            fissures_forward.push(f);
          }
        }

        send({
          type: "welcome",
          versions: self.generate_braid(parents || versions),
          fissures: copy_fissures(fissures_back),
          parents:
            parents &&
            Object.keys(parents).length &&
            self.get_leaves(self.ancestors(parents, true)),
          ...(type == "subscribe" ? { peer: self.id } : {}),
          conn,
        });
      } else if (fissures_back.length) {
        send({
          type: "fissure",
          fissures: copy_fissures(fissures_back),
          conn,
        });
      }

      /// ## message `forget`
      /// Used to disconnect without creating a fissure, presumably meaning the sending peer doesn't plan to make any edits while they're disconnected.
      /// ``` js
      /// {type: 'forget', conn: 'CONN_ID'}
      /// ```
      if (type == "forget") {
        if (self.conns[conn] == null) throw Error("bad");
        send({ type: "ack", forget: true, conn });

        delete self.conns[conn];
        delete self.proto_conns[conn];
      }

      /// ## message forget `ack` 
      /// Sent in response to `forget`.. so they know we forgot them.
      /// ``` js
      /// {type: 'ack', forget: true, conn: 'CONN_ID'}
      /// ```
      if (type == "ack" && forget) {
        self.forget_cbs[conn]();
      }

      /// ## message `update`
      /// Sent to alert peers about a change in the document. The change is represented as a version, with a unique id, a set of parent versions (the most recent versions known before adding this version), and an array of patches, where the offsets in the patches do not take into account the application of other patches in the same array.
      /// ``` js
      /// { type: 'update',
      ///   version: 'VERSION_ID',
      ///   parents: {'PARENT_VERSION_ID': true, ...},
      ///   patches: [ {range: '.json.path.a.b', content: 42}, ... ],
      ///   conn: 'CONN_ID' }
      /// ```
      if (type == "update") {
        if (conn == null || !self.T[version]) {
          let ps = Object.keys(parents);

          if (!ps.length && Object.keys(self.T).length) return;
          // BUG FIX: loop variable `p` was an undeclared implicit global.
          for (let p of ps) if (!self.T[p]) return;

          rebased_patches = self.add_version(version, parents, patches);

          for (let c of Object.keys(self.conns))
            if (c != conn)
              send({ type: "update", version, parents, patches, ackme, conn: c });
        }
      }

      /// ## message `ackme`
      /// Sent for pruning purposes, to try and establish whether everyone has seen the most recent versions. Note that an `update` message is treated as a `ackme` message for the version in the update.
      /// ``` js
      /// { type: 'ackme',
      ///   version: 'ACKME_ID',
      ///   versions: {'VERSION_ID_A': true, ...},
      ///   conn: 'CONN_ID' }
      /// ```
      if (type == "ackme" || type == "update") {
        if (!Object.keys(versions).every((v) => self.T[v])) return;

        // If this ackme covers exactly our current frontier, we no longer
        // need our own pending ackme timer.
        if (
          self.ackme_timeout &&
          ackme_versions_array.length ==
            Object.keys(self.current_version).length &&
          ackme_versions_array.every((x) => self.current_version[x])
        ) {
          clear_timeout(self.ackme_timeout);
          self.ackme_timeout = null;
        }

        let m = self.ackmes[ackme];
        if (!m) {
          // First time we've seen this ackme: start counting acks from our
          // other connections.
          m = self.ackmes[ackme] = {
            id: ackme,
            origin: conn,
            count: Object.keys(self.conns).length - (conn != null ? 1 : 0),
            versions: ackme_versions,
            seq: self.conn_count,
            time: get_time(),
          };
          m.orig_count = m.count;
          m.real_ackme = type == "ackme";
          m.key = JSON.stringify(Object.keys(m.versions).sort());
          self.ackme_map[m.key] = self.ackme_map[m.key] || {};
          let before = Object.keys(self.ackme_map[m.key]).length;
          self.ackme_map[m.key][m.id] = true;
          let after = Object.keys(self.ackme_map[m.key]).length;
          // Two distinct ackmes for the same version set means peers are
          // colliding — back off by doubling the wait time (at most once
          // per window; see ackme_increases_allowed).
          if (before == 1 && after == 2 && self.ackme_increases_allowed > 0) {
            self.ackme_current_wait_time *= 2;
            self.ackme_increases_allowed--;
          }

          if (type == "ackme")
            for (let c of Object.keys(self.conns))
              if (c != conn)
                send({
                  type: "ackme",
                  ackme,
                  versions: ackme_versions,
                  conn: c,
                });
        } else if (m.seq < self.conns[conn].seq) {
          // This connection is newer than the ackme; ack immediately rather
          // than counting it.
          send({
            type: "ack",
            seen: "local",
            ackme,
            versions: ackme_versions,
            conn,
          });
          return;
        } else m.count--;
        check_ackme_count(ackme);
      }

      /// ## message local `ack`
      /// Sent in response to `update`, but not right away; a peer will first send the `update` over all its other connections, and only after they have all responded with a local `ack` – and we didn't see a `fissure` message while waiting – will the peer send a local `ack` over the originating connection.
      /// ``` js
      /// {type: 'ack', seen: 'local', version: 'VERSION_ID', conn: 'CONN_ID'}
      /// ```
      if (type == "ack" && seen == "local") {
        let m = self.ackmes[ackme];
        if (!m || m.cancelled) return;
        m.count--;
        check_ackme_count(ackme);
      }
      // When an ackme's outstanding count reaches zero, either propagate the
      // local ack back toward the origin, or (if we ARE the origin) declare
      // the versions globally acknowledged.
      function check_ackme_count(ackme) {
        let m = self.ackmes[ackme];
        if (m && m.count === 0 && !m.cancelled) {
          m.time2 = get_time();
          if (m.orig_count > 0) {
            // Exponential moving average of local-ack round-trip time.
            let t = m.time2 - m.time;
            let weight = 0.1;
            self.ackme_time_est_1 =
              weight * t + (1 - weight) * self.ackme_time_est_1;
          }
          if (m.origin != null) {
            if (self.conns[m.origin])
              send({
                type: "ack",
                seen: "local",
                ackme,
                versions: ackme_versions,
                conn: m.origin,
              });
          } else add_full_ack_leaves(ackme);
        }
      }

      /// ## message global `ack`
      /// Sent after an originating peer has received a local `ack` over all its connections, or after any peer receives a global `ack`, so that everyone may come to know that this version has been seen by everyone in this peer group.
      /// ``` js
      /// {type: 'ack', seen: 'global', version: 'VERSION_ID', conn: 'CONN_ID'}
      /// ```
      if (type == "ack" && seen == "global") {
        let m = self.ackmes[ackme];

        if (!m || m.cancelled) return;

        // Exponential moving average of global-ack propagation time.
        let t = get_time() - m.time2;
        let weight = 0.1;
        self.ackme_time_est_2 =
          weight * t + (1 - weight) * self.ackme_time_est_2;

        if (m.real_ackme && Object.keys(self.ackme_map[m.key]).length == 1) {
          self.ackme_current_wait_time *= 0.8;
        }

        add_full_ack_leaves(ackme, conn);
      }
      // Mark the ackme's versions as globally acknowledged: forward the
      // global ack to eligible connections, move the acked boundary up to
      // these versions, and prune.
      function add_full_ack_leaves(ackme, conn) {
        let m = self.ackmes[ackme];
        if (!m || m.cancelled) return;
        m.cancelled = true;

        for (let [c, cc] of Object.entries(self.conns))
          if (c != conn && cc.seq <= m.seq)
            send({
              type: "ack",
              seen: "global",
              ackme,
              versions: ackme_versions,
              conn: c,
            });

        for (let v of Object.keys(m.versions)) {
          if (!self.T[v]) continue;
          // Clear any acked-boundary marks on ancestors of v, then mark v.
          let marks = {};
          let f = (v) => {
            if (!marks[v]) {
              marks[v] = true;
              delete self.acked_boundary[v];
              Object.keys(self.T[v]).forEach(f);
            }
          };
          f(v);
          self.acked_boundary[v] = true;
        }
        prune(false, m.seq);
      }

      // Propagate whatever was new to us (versions and/or fissures) to all
      // other connections.
      if (added_versions.length || fissures_forward.length) {
        for (let c of Object.keys(self.conns))
          if (c != conn)
            send({
              type: added_versions.length ? "welcome" : "fissure",
              ...(added_versions.length ? { versions: added_versions } : {}),
              fissures: copy_fissures(fissures_forward),
              conn: c,
            });
      }

      if (fissures_forward.length) resolve_fissures();

      // If pruning is possible, schedule a randomized-delay ackme to try to
      // establish global acknowledgement (randomized to avoid peers
      // colliding).
      if (
        !self.ackme_timeout &&
        type != "update" &&
        type != "ackme" &&
        prune(true)
      ) {
        if (!self.ackme_current_wait_time) {
          self.ackme_current_wait_time =
            4 * (self.ackme_time_est_1 + self.ackme_time_est_2);
        }

        let t = Math.random() * self.ackme_current_wait_time;

        self.ackme_timeout = set_timeout(() => {
          self.ackme_increases_allowed = 1;
          self.ackme_timeout = null;
          if (prune(true)) self.ackme();
        }, t);
      }

      if (type == "welcome" && peer == null && prune(true, null, true))
        self.ackme();

      return rebased_patches;
    };

    /// # antimatter_crdt.subscribe(conn)
    ///
    /// Register a new connection with id `conn` – triggers this antimatter_crdt object to send a `subscribe` message over the given connection. 
    ///
    /// ``` js
    /// alice_antimatter_crdt.subscribe('connection_to_bob')
    /// ```
    self.subscribe = (conn_id) => {
      // Remember this connection as a proto-connection (handshake still in
      // progress), then ask whoever is on the other end for a subscription.
      self.proto_conns[conn_id] = true;
      send({ type: "subscribe", peer: self.id, conn: conn_id });
    };

    /// # antimatter_crdt.forget(conn)
    ///
    /// Disconnect the given connection without creating a fissure – use this when we don't expect to reconnect with the peer. If we might reconnect later, call `disconnect` instead, which creates a fissure that lets us heal the split when we reconnect.
    ///
    /// ``` js
    /// alice_antimatter_crdt.forget('connection_to_bob')
    /// ```
    self.forget = async (conn) => {
      // Politely tell the peer we're leaving (so they can drop state for
      // us), then sever the connection WITHOUT creating a fissure.
      await new Promise((done) => {
        if (self.conns[conn] != null) {
          // Resolved later, when the peer's forget acknowledgement arrives
          // and the ack handler invokes the callback stored here.
          self.forget_cbs[conn] = done;
          send({ type: "forget", conn });
        } else {
          // No live connection to wait on — resolve immediately instead of
          // leaving the returned promise pending forever (the original code
          // never called `done` in this branch, so `await forget(...)` on an
          // unknown connection would hang).
          done();
        }
        self.disconnect(conn, false);
      });
    };

    /// # antimatter_crdt.disconnect(conn)
    ///
    /// If we detect that a connection has closed, let the antimatter_crdt object know by calling this method with the given connection id – this will create a fissure so we can reconnect with whoever was on the other end of the connection later on. 
    ///
    /// ``` js
    /// alice_antimatter_crdt.disconnect('connection_to_bob')
    /// ```
    self.disconnect = (conn, fissure = true) => {
      // Nothing to do if we've never heard of this connection.
      let live = self.conns[conn];
      if (live == null && !self.proto_conns[conn]) return;
      delete self.proto_conns[conn];

      if (live) {
        let peer = live.peer;
        delete self.conns[conn];

        // Record a fissure so we can heal the split if we reconnect later.
        if (fissure) {
          let f = create_fissure(peer, conn);
          if (f) self.receive({ type: "fissure", fissure: f });
        }
      }
    };

    /// # antimatter_crdt.update(...patches)
    ///
    /// Modify this antimatter_crdt object by applying the given patches. Each patch looks like `{range: '.life.meaning', content: 42}`. Calling this method will trigger calling the `send` callback to let our peers know about this change. 
    ///
    /// ``` js
    /// antimatter_crdt.update({
    ///   range: '.life.meaning',
    ///   content: 42
    /// })
    /// ```
    self.update = (...patches) => {
      // Versions are "<seq>@<peer-id>", globally unique per peer.
      let version = `${self.next_seq++}@${self.id}`;
      let ackme = Math.random().toString(36).slice(2);
      // Route through our own receive() so the edit is processed exactly
      // like a remote one (and broadcast to our peers).
      self.receive({
        type: "update",
        version,
        parents: { ...self.current_version },
        patches,
        ackme,
      });
      return version;
    };

    /// # antimatter_crdt.ackme()
    ///
    /// Initiate sending a `ackme` message to try and establish whether certain versions can be pruned. 
    ///
    /// ``` js
    /// antimatter_crdt.ackme()
    /// ```
    self.ackme = () => {
      // Start from the current leaf versions, then add every version that
      // has been grouped together with one of them.
      let versions = { ...self.current_version };
      for (let v of Object.keys(versions)) {
        let group = self.version_groups[v];
        if (group) for (let member of group) versions[member] = true;
      }

      let ackme = Math.random().toString(36).slice(2);
      self.receive({ type: "ackme", ackme, versions });
      return ackme;
    };

    // Flag every in-flight ackme as dead so that late acks are ignored.
    function cancel_ackmes() {
      Object.values(self.ackmes).forEach((m) => {
        m.cancelled = true;
      });
    }

    // Build a fissure record for a broken connection to `peer`, capturing
    // every version that is not yet safely acknowledged (plus the acked
    // boundary itself). Returns undefined when there is nothing to protect.
    function create_fissure(peer, conn) {
      let acked = self.ancestors(self.acked_boundary);

      let versions = {};
      for (let v of Object.keys(self.T))
        if (!acked[v] || self.acked_boundary[v]) versions[v] = true;

      if (!Object.keys(versions).length) return;
      return { a: self.id, b: peer, conn, versions, time: get_time() };
    }

    // When we hold both halves of a fissure (ours and the peer's mirror
    // record), the versions trapped in it are "unfissured": both sides are
    // reachable again, so the versions must be re-acknowledged from scratch.
    function resolve_fissures() {
      let unfissured = {};

      Object.entries(self.fissures).forEach(([fk, f]) => {
        // The matching half has a/b swapped on the same connection id.
        var other_key = f.b + ":" + f.a + ":" + f.conn;
        var other = self.fissures[other_key];
        if (other) {
          // Collect the trapped versions, then empty both records'
          // version sets (fresh copies, so prior snapshots stay intact).
          if (Object.keys(f.versions).length) {
            for (let v of Object.keys(f.versions)) unfissured[v] = true;
            self.fissures[fk] = { ...f, versions: {} };
          }
          if (Object.keys(other.versions).length) {
            for (let v of Object.keys(other.versions)) unfissured[v] = true;
            self.fissures[other_key] = { ...other, versions: {} };
          }
        }
      });

      if (Object.keys(unfissured).length) {
        // Any pending ackmes may be based on stale knowledge now.
        cancel_ackmes();

        // Pull the acked boundary back so it no longer covers the
        // unfissured versions or anything descended from them.
        let ack_versions = self.ancestors(self.acked_boundary);
        let unfissured_descendants = self.descendants(unfissured, true);
        for (let un of Object.keys(unfissured_descendants))
          if (ack_versions[un]) delete ack_versions[un];
        self.acked_boundary = self.get_leaves(ack_versions);
      }
    }

    // Collapse ("bubble") stretches of the version DAG that everyone has
    // acknowledged, shrinking self.T. With just_checking=true it mutates
    // nothing and merely returns true if any pruning is possible; with
    // just_checking=false it performs the pruning via apply_bubbles.
    // `t` limits which fissure pairs may be discarded; `just_versions`
    // restricts the just_checking test to version-bubbling only.
    function prune(just_checking, t, just_versions) {
      if (just_checking) t = Infinity;

      // When only checking, work on a copy so the real table is untouched.
      let fissures = just_checking ? { ...self.fissures } : self.fissures;

      // Drop fissure pairs whose two halves are both present (healed).
      Object.entries(fissures).forEach((x) => {
        var other_key = x[1].b + ":" + x[1].a + ":" + x[1].conn;
        var other = fissures[other_key];
        // NOTE(review): locally-created fissures carry a `time` field (see
        // create_fissure); the `t` compared here appears to be a
        // per-connection sequence number attached elsewhere (prune is
        // invoked with m.seq) — confirm against the fissure-receive path.
        if (other && x[1].t <= t && other.t <= t) {
          delete fissures[x[0]];
          delete fissures[other_key];
        }
      });

      // Expire fissures older than the configured lifetime, if any.
      if (self.fissure_lifetime != null) {
        var now = get_time();
        Object.entries(fissures).forEach(([k, f]) => {
          if (f.time == null) f.time = now;
          if (f.time <= now - self.fissure_lifetime) {
            delete fissures[k];
          }
        });
      }

      // If checking and some fissure could be removed, that alone means
      // pruning would accomplish something.
      if (
        just_checking &&
        !just_versions &&
        Object.keys(fissures).length < Object.keys(self.fissures).length
      )
        return true;

      // Versions we may NOT bubble away: anything referenced by a
      // surviving fissure...
      var restricted = {};

      Object.values(fissures).forEach((f) => {
        Object.keys(f.versions).forEach((v) => (restricted[v] = true));
      });

      // ...and, when actually pruning, anything not yet fully acked.
      if (!just_checking) {
        var acked = self.ancestors(self.acked_boundary);
        Object.keys(self.T).forEach((x) => {
          if (!acked[x]) restricted[x] = true;
        });
      }

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      // to_bubble maps version -> [bottom, top] of the bubble replacing it.
      let to_bubble = {};
      // Propagate a bubble assignment down through all ancestors.
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }
      let visited = {};
      // Walk from the current leaves toward the root, trying to form a
      // bubble at each version; returns truthy (for .some) as soon as a
      // prunable bubble is found in just_checking mode.
      function f(cur) {
        if (!self.T[cur] || visited[cur]) return;
        visited[cur] = true;

        // First try to bubble a whole sibling group (shared parent set).
        if (
          to_bubble[cur] == null &&
          parent_sets[cur] &&
          !parent_sets[cur].done
        ) {
          parent_sets[cur].done = true;
          let bottom = parent_sets[cur].members;
          let top = find_one_bubble(bottom, children, child_sets, restricted);
          if (top) {
            if (just_checking) return true;
            let bottom_array = Object.keys(bottom).sort();
            let top_array = Object.keys(top);
            // Record that these versions now travel under one name.
            raw_add_version_group(bottom_array);
            let bubble = [bottom_array[0], top_array[0]];
            for (let v of top_array) to_bubble[v] = bubble;
            for (let v of bottom_array) mark_bubble(v, bubble);
          }
        }
        // Otherwise try a single-version bubble, or mark it irreducible.
        if (to_bubble[cur] == null) {
          let top = find_one_bubble(
            { [cur]: true },
            children,
            child_sets,
            restricted
          );
          if (top && !top[cur]) {
            if (just_checking) return true;
            let bubble = [cur, Object.keys(top)[0]];
            for (let v of Object.keys(top)) to_bubble[v] = bubble;
            mark_bubble(bubble[0], bubble);
          } else {
            to_bubble[cur] = [cur, cur];
          }
        }
        // Recurse into parents (via the version group's representative if
        // cur itself was renamed away).
        return Object.keys(
          self.T[cur] || self.T[self.version_groups[cur][0]]
        ).some(f);
      }
      if (Object.keys(self.current_version).some(f) && just_checking)
        return true;

      self.apply_bubbles(to_bubble);

      // Discard ackmes whose versions no longer exist after bubbling.
      for (let [k, m] of Object.entries(self.ackmes)) {
        let vs = Object.keys(m.versions);
        if (
          !vs.length ||
          !vs.every((v) => self.T[v] || self.version_groups[v])
        ) {
          delete self.ackmes[k];
          delete self.ackme_map[m.key][m.id];
          if (!Object.keys(self.ackme_map[m.key]).length)
            delete self.ackme_map[m.key];
        }
      }

      // Drop version groups whose representative left the DAG.
      for (let [v, vs] of Object.entries(self.version_groups)) {
        if (!self.T[vs[0]]) delete self.version_groups[v];
      }
    }

    return self;
  };

  /// ## create_json_crdt([init])
  ///
  /// Create a new `json_crdt` object (or start with `init`, and add stuff to that). 
  ///
  /// ``` js
  /// var json_crdt = create_json_crdt()
  /// ``` 
  create_json_crdt = (self) => {
    self = self || {};
    // S: the CRDT state tree (nodes tagged t: lit/val/obj/arr/str).
    if (self.S === undefined) self.S = null;
    // T: the time DAG — maps each version to its set of parent versions.
    self.T = self.T || {};
    if (self.root_version === undefined) self.root_version = null;
    // current_version: the set of leaf versions (no children yet).
    self.current_version = self.current_version || {};
    // version_cache: per-version update messages, replayed by generate_braid.
    self.version_cache = self.version_cache || {};

    // A "lit" wraps a plain JSON value with no edit history; these helpers
    // test for, unwrap, and wrap such literal values.
    let is_lit = (x) => !x || typeof x != "object" || x.t == "lit";
    let get_lit = (x) => (x && typeof x == "object" && x.t == "lit" ? x.S : x);
    let make_lit = (x) => (x && typeof x == "object" ? { t: "lit", S: x } : x);

    /// # json_crdt.read()
    ///
    /// Returns an instance of the `json` object represented by this json_crdt data-structure. 
    ///
    /// ``` js
    /// console.log(json_crdt.read())
    /// ```
    self.read = (is_anc) => {
      // Without an ancestor test, read the full current state.
      return raw_read(self.S, is_anc || (() => true));
    };

    // Materialize a plain JSON value from a CRDT subtree, as seen by the
    // versions accepted by is_anc.
    function raw_read(x, is_anc) {
      // Primitives (and null) are already plain values.
      if (!x || typeof x != "object") return x;

      switch (x.t) {
        case "lit":
          // Literal subtree: return a deep copy so callers can't mutate us.
          return JSON.parse(JSON.stringify(x.S));
        case "val":
          // A register: read whichever element currently wins.
          return raw_read(sequence_crdt.get(x.S, 0, is_anc), is_anc);
        case "obj": {
          var result = {};
          for (var [key, sub] of Object.entries(x.S)) {
            var value = raw_read(sub, is_anc);
            // null/undefined fields are treated as absent.
            if (value != null) result[key] = value;
          }
          return result;
        }
        case "arr": {
          var items = [];
          sequence_crdt.traverse(
            x.S,
            is_anc,
            (node, _, __, ___, ____, deleted) => {
              if (deleted) return;
              for (var e of node.elems) items.push(raw_read(e, is_anc));
            },
            true
          );
          return items;
        }
        case "str": {
          var pieces = [];
          sequence_crdt.traverse(
            x.S,
            is_anc,
            (node, _, __, ___, ____, deleted) => {
              if (!deleted) pieces.push(node.elems);
            },
            true
          );
          return pieces.join("");
        }
        default:
          throw Error("bad");
      }
    }

    /// # json_crdt.generate_braid(versions)
    ///
    /// Returns an array of `update` messages that each look like this: `{version, parents, patches, sort_keys}`, such that if we pass all these messages to `antimatter_crdt.receive()`, we'll reconstruct the data in this `json_crdt` data-structure, assuming the recipient already has the given `versions` (each version is represented as a key in an object, and each value is `true`).
    ///
    /// ``` js
    /// json_crdt.generate_braid({
    ///   alice2: true, 
    ///   bob3: true
    /// })
    /// ```
    self.generate_braid = (versions) => {
      // Versions the recipient already knows, including all their ancestors.
      var anc =
        versions && Object.keys(versions).length
          ? self.ancestors(versions, true)
          : {};
      var is_anc = (x) => anc[x];

      if (Object.keys(self.T).length === 0) return [];

      // Replay the cached update message for every version the recipient
      // lacks, regenerating (and re-caching) messages that were evicted.
      return Object.entries(self.version_cache)
        .filter((x) => !is_anc(x[0]))
        .map(([version, update_message]) => {
          return (self.version_cache[version] =
            update_message || generate_update_message(version));
        });

      // Rebuild the update message for `version` from the CRDT state.
      function generate_update_message(version) {
        // A parentless (root) version is a wholesale snapshot: read the
        // document as it looked with only this version applied.
        if (!Object.keys(self.T[version]).length) {
          return {
            version,
            parents: {},
            patches: [{ range: "", content: self.read((v) => v == version) }],
          };
        }

        var is_lit = (x) => !x || typeof x !== "object" || x.t === "lit";
        var get_lit = (x) =>
          x && typeof x === "object" && x.t === "lit" ? x.S : x;

        // Ancestors of `version`, excluding itself: the state its author saw.
        var ancs = self.ancestors({ [version]: true });
        delete ancs[version];
        var is_anc = (x) => ancs[x];
        var path = [];
        var patches = [];
        var sort_keys = {};
        recurse(self.S);
        // Walk the whole tree, asking each sequence node for the splices
        // contributed by `version`, and rebuilding the json-path to them.
        function recurse(x) {
          if (is_lit(x)) {
          } else if (x.t === "val") {
            // Registers: a non-empty splice means this version set a value.
            sequence_crdt
              .generate_braid(x.S, version, is_anc, raw_read)
              .forEach((s) => {
                if (s[2].length) {
                  patches.push({ range: path.join(""), content: s[2][0] });
                  if (s[3]) sort_keys[patches.length - 1] = s[3];
                }
              });
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach(recurse);
            });
          } else if (x.t === "arr") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
            // Recurse into each visible element, tracking its index in the
            // path as seen by the ancestor set.
            var i = 0;
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach((e) => {
                path.push(`[${i++}]`);
                recurse(e);
                path.pop();
              });
            });
          } else if (x.t === "obj") {
            Object.entries(x.S).forEach((e) => {
              path.push("[" + JSON.stringify(e[0]) + "]");
              recurse(e[1]);
              path.pop();
            });
          } else if (x.t === "str") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
          }
        }

        return {
          version,
          parents: { ...self.T[version] },
          patches,
          sort_keys,
        };
      }
    };

    /// # json_crdt.apply_bubbles(to_bubble)
    ///
    /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
    ///
    /// ``` js 
    /// json_crdt.apply_bubbles({
    ///   alice4: ['bob5', 'alice4'], 
    ///   bob5: ['bob5', 'alice4']
    /// }) 
    /// ```
    self.apply_bubbles = (to_bubble) => {
      // Phase 1: walk the state tree, letting each sequence CRDT rename its
      // versions per to_bubble, and collapsing subtrees that no longer need
      // history into plain literals.
      function recurse(x) {
        if (is_lit(x)) return x;
        if (x.t == "val") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          // A register only keeps its single winning element.
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.slice(0, 1).map(recurse);
            },
            true
          );
          // A one-element, no-successor register can become its value.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.length == 1 &&
            is_lit(x.S.elems[0])
          )
            return x.S.elems[0];
          return x;
        }
        if (x.t == "arr") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.map(recurse);
            },
            true
          );
          // A single node of all-literal elements with no deletions can
          // collapse into a literal array.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.every(is_lit) &&
            !Object.keys(x.S.deleted_by).length
          )
            return { t: "lit", S: x.S.elems.map(get_lit) };
          return x;
        }
        if (x.t == "obj") {
          Object.entries(x.S).forEach((e) => {
            var y = (x.S[e[0]] = recurse(e[1]));
            // Fields that collapsed to null/undefined disappear entirely.
            if (y == null) delete x.S[e[0]];
          });
          if (Object.values(x.S).every(is_lit)) {
            var o = {};
            Object.entries(x.S).forEach((e) => (o[e[0]] = get_lit(e[1])));
            return { t: "lit", S: o };
          }
          return x;
        }
        if (x.t == "str") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          // Single-node, deletion-free strings become plain strings.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            !Object.keys(x.S.deleted_by).length
          )
            return x.S.elems;
          return x;
        }
      }
      self.S = recurse(self.S);

      // Phase 2: rewrite the time DAG. Each version in a bubble is renamed
      // to the bubble's bottom; the bubble's top keeps the parent links.
      Object.entries(to_bubble).forEach(([version, bubble]) => {
        if (!self.T[version]) return;

        // Optional forwarding table for callers tracking renames.
        if (self.my_where_are_they_now)
          self.my_where_are_they_now[version] = bubble[0];

        // The bubble's top donates its parents to the bubble's new name.
        if (version === bubble[1]) self.T[bubble[0]] = self.T[bubble[1]];

        if (version !== bubble[0]) {
          // This version is absorbed: purge it everywhere it appears.
          if (self.root_version == version) self.root_version = bubble[0];
          delete self.T[version];
          delete self.version_cache[version];
          delete self.acked_boundary[version];
          delete self.current_version[version];
          if (
            self.version_groups[version] &&
            self.version_groups[version][0] == version
          ) {
            for (let v of self.version_groups[version]) {
              delete self.version_groups[v];
            }
          }
          // Strip it from every remaining version's parent set (on copies,
          // so prior snapshots of T entries stay intact).
          for (let [k, parents] of Object.entries(self.T)) {
            self.T[k] = parents = { ...parents };
            for (let p of Object.keys(parents)) {
              if (p == version) delete parents[p];
            }
          }
        } else self.version_cache[version] = null;
      });

      // Final collapse: a single fully-acked leaf and no fissures means the
      // entire history can be replaced by one literal snapshot.
      var leaves = Object.keys(self.current_version);
      var acked_boundary = Object.keys(self.acked_boundary);
      var fiss = Object.keys(self.fissures);
      if (
        leaves.length == 1 &&
        acked_boundary.length == 1 &&
        leaves[0] == acked_boundary[0] &&
        fiss.length == 0
      ) {
        self.T = { [leaves[0]]: {} };
        self.S = make_lit(self.read());
      }
    };

    /// # json_crdt.add_version(version, parents, patches[, sort_keys])
    ///
    /// The main method for modifying a `json_crdt` data structure. 
    ///
    /// * `version`: Unique string associated with this edit. 
    /// * `parents`: A set of versions that this version is aware of, represented as a map with versions as keys, and values of `true`. 
    /// * `patches`: An array of patches, each patch looks like this `{range: '.life.meaning', content: 42}`. 
    /// * `sort_keys`: (optional) An object where each key is an index, and the value is a sort_key to use with the patch at the given index in the `patches` array – a sort_key overrides the version for a patch for the purposes of sorting. This can be useful after doing some pruning. 
    ///
    /// ``` js
    /// json_crdt.add_version(
    ///   'alice6', 
    ///   {
    ///     alice5: true, 
    ///     bob7: true
    ///   }, 
    ///   [
    ///     {
    ///       range: '.a.b', 
    ///       content: 'c'
    ///     }
    ///   ]
    /// )
    /// ``` 
    self.add_version = (version, parents, patches, sort_keys) => {
      // A version already in the time DAG was applied before: ignore it.
      if (self.T[version]) return;

      if (self.root_version == null) self.root_version = version;

      self.T[version] = { ...parents };

      // Deep-copy the message so generate_braid can replay it verbatim.
      self.version_cache[version] = JSON.parse(
        JSON.stringify({
          version,
          parents,
          patches,
          sort_keys,
        })
      );

      // The new version supersedes its parents among the current leaves.
      Object.keys(parents).forEach((k) => {
        if (self.current_version[k]) delete self.current_version[k];
      });
      self.current_version[version] = true;

      if (!sort_keys) sort_keys = {};

      // A parentless version resets the whole document to the first
      // patch's content. (Note: no rebased patches are returned here.)
      if (!Object.keys(parents).length) {
        var parse = self.parse_patch(patches[0]);
        self.S = make_lit(parse.value);
        return patches;
      }

      // is_anc(v) answers: did the author of `version` know about v?
      let is_anc;
      if (parents == self.current_version) {
        // Same object reference as our leaf set: everything except the new
        // version itself is an ancestor.
        is_anc = (_version) => _version != version;
      } else {
        let ancs = self.ancestors(parents);
        is_anc = (_version) => ancs[_version];
      }

      // Apply each patch, collecting its position as rebased against the
      // current state (for callers that need to mirror the edit).
      var rebased_patches = [];
      patches.forEach((patch, i) => {
        var sort_key = sort_keys[i];
        var parse = self.parse_patch(patch);
        var cur = resolve_path(parse);
        if (!parse.slice) {
          // Whole-value assignment: replace the register's full contents.
          if (cur.t != "val") throw Error("bad");
          var len = sequence_crdt.length(cur.S, is_anc);
          sequence_crdt.add_version(
            cur.S,
            version,
            [[0, len, [parse.delete ? null : make_lit(parse.value)], sort_key]],
            is_anc
          );
          rebased_patches.push(patch);
        } else {
          // Range splice into a string or array.
          if (typeof parse.value === "string" && cur.t !== "str")
            throw Error(
              `Cannot splice string ${JSON.stringify(
                parse.value
              )} into non-string`
            );
          if (parse.value instanceof Array && cur.t !== "arr")
            throw Error(
              `Cannot splice array ${JSON.stringify(
                parse.value
              )} into non-array`
            );
          if (parse.value instanceof Array)
            parse.value = parse.value.map((x) => make_lit(x));

          // Negative (or negative-zero) bounds count back from the end.
          var r0 = parse.slice[0];
          var r1 = parse.slice[1];
          if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {
            let len = sequence_crdt.length(cur.S, is_anc);
            if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0;
            if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1;
          }

          var rebased_splices = sequence_crdt.add_version(
            cur.S,
            version,
            [[r0, r1 - r0, parse.value, sort_key]],
            is_anc
          );
          // Re-express each rebased splice as a json-path range patch.
          for (let rebased_splice of rebased_splices)
            rebased_patches.push({
              range: `${parse.path
                .map((x) => `[${JSON.stringify(x)}]`)
                .join("")}[${rebased_splice[0]}:${
                rebased_splice[0] + rebased_splice[1]
              }]`,
              content: rebased_splice[2],
            });
        }
      });

      // Walk parse.path down the tree, upgrading literal subtrees into
      // editable CRDT nodes along the way, and return the node the patch
      // targets.
      function resolve_path(parse) {
        var cur = self.S;
        // Ensure the root is an editable register.
        if (!cur || typeof cur != "object" || cur.t == "lit")
          cur = self.S = {
            t: "val",
            S: sequence_crdt.create_node(self.root_version, [cur]),
          };
        // prev_S/prev_i remember where `cur` lives so upgraded nodes can be
        // written back into their parent sequence.
        var prev_S = null;
        var prev_i = 0;
        for (var i = 0; i < parse.path.length; i++) {
          var key = parse.path[i];
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          if (cur.t == "lit") {
            // Expand a literal object/array one level into CRDT form.
            var new_cur = {};
            if (cur.S instanceof Array) {
              new_cur.t = "arr";
              new_cur.S = sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              );
            } else {
              if (typeof cur.S != "object") throw Error("bad");
              new_cur.t = "obj";
              new_cur.S = {};
              Object.entries(cur.S).forEach(
                (e) => (new_cur.S[e[0]] = make_lit(e[1]))
              );
            }
            cur = new_cur;
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          }
          if (cur.t == "obj") {
            // Ensure the field is an editable register before descending.
            let x = cur.S[key];
            if (!x || typeof x != "object" || x.t == "lit")
              x = cur.S[key] = {
                t: "val",
                S: sequence_crdt.create_node(self.root_version, [
                  x == null ? null : x,
                ]),
              };
            cur = x;
          } else if (i == parse.path.length - 1 && !parse.slice) {
            // A trailing numeric index is treated as a one-element slice.
            parse.slice = [key, key + 1];
            parse.value = cur.t == "str" ? parse.value : [parse.value];
          } else if (cur.t == "arr") {
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = key), is_anc);
          } else throw Error("bad");
        }
        if (parse.slice) {
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          // Slicing a plain string or literal array: upgrade it in place.
          if (typeof cur == "string") {
            cur = {
              t: "str",
              S: sequence_crdt.create_node(self.root_version, cur),
            };
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          } else if (cur.t == "lit") {
            if (!(cur.S instanceof Array)) throw Error("bad");
            cur = {
              t: "arr",
              S: sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              ),
            };
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          }
        }
        return cur;
      }

      return rebased_patches;
    };

    /// # json_crdt.get_child_map()
    ///
    /// Returns a map where each key is a version, and each value is a set of child versions, represented as a map with versions as keys, and values of `true`.
    ///
    /// ``` js
    /// json_crdt.get_child_map()
    /// ``` 
    self.get_child_map = () => {
      // Invert the parent links in T into a parent -> {child: true} map.
      var children = {};
      for (var [child, parents] of Object.entries(self.T))
        for (var parent of Object.keys(parents))
          (children[parent] = children[parent] || {})[child] = true;
      return children;
    };

    /// # json_crdt.ancestors(versions, ignore_nonexistent=false)
    ///
    /// Gather `versions` and all their ancestors into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.
    ///
    /// ``` js
    /// json_crdt.ancestors({
    ///   alice12: true, 
    ///   bob10: true
    /// }) 
    /// ``` 
    self.ancestors = (versions, ignore_nonexistent) => {
      var result = {};
      var visit = (version) => {
        if (result[version]) return;
        var parents = self.T[version];
        if (!parents) {
          if (ignore_nonexistent) return;
          throw Error(`The version ${version} no existo`);
        }
        result[version] = true;
        for (var p of Object.keys(parents)) visit(p);
      };
      for (var v of Object.keys(versions)) visit(v);
      return result;
    };

    /// # json_crdt.descendants(versions, ignore_nonexistent=false)
    ///
    /// Gather `versions` and all their descendants into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.
    ///
    /// ``` js
    /// json_crdt.descendants({
    ///   alice12: true, 
    ///   bob10: true
    /// }) 
    /// ``` 
    self.descendants = (versions, ignore_nonexistent) => {
      // Walk the inverted DAG (parent -> children) downward.
      var children = self.get_child_map();
      var result = {};
      var visit = (version) => {
        if (result[version]) return;
        if (!self.T[version]) {
          if (ignore_nonexistent) return;
          throw Error(`The version ${version} no existo`);
        }
        result[version] = true;
        for (var child of Object.keys(children[version] || {})) visit(child);
      };
      for (var v of Object.keys(versions)) visit(v);
      return result;
    };

    /// # json_crdt.get_leaves(versions)
    ///
    /// Returns a set of versions from `versions` which don't also have a child in `versions`. `versions` is itself a set of versions, represented as an object with version keys and `true` values, and the return value is represented the same way.
    self.get_leaves = (versions) => {
      // Start with everything, then knock out any version that is some
      // other member's parent.
      var leaves = { ...versions };
      for (var v of Object.keys(versions))
        for (var parent of Object.keys(self.T[v])) delete leaves[parent];
      return leaves;
    };

    /// # json_crdt.parse_patch(patch)
    ///
    /// Takes a patch in the form `{range, content}`, and returns an object of the form `{path: [...], [slice: [...]], [delete: true], content}`; basically calling `parse_json_path` on `patch.range`, and adding `patch.content` along for the ride.
    self.parse_patch = (patch) => {
      // Parse the range, then carry the patch's content along as `value`.
      var parsed = self.parse_json_path(patch.range);
      parsed.value = patch.content;
      return parsed;
    };

    /// # json_crdt.parse_json_path(json_path)
    ///
    /// Parses the string `json_path` into an object like: `{path: [...], [slice: [...]], [delete: true]}`. 
    ///
    /// * `a.b[3]` --> `{path: ['a', 'b', 3]}`
    /// * `a.b[3:5]` --> `{path: ['a', 'b'], slice: [3, 5]}`
    /// * `delete a.b` --> `{path: ['a', 'b'], delete: true}`
    ///
    /// ``` js
    /// console.log(json_crdt.parse_json_path('a.b.c'))
    /// ```
    self.parse_json_path = (json_path) => {
      let ret = { path: [] };
      // Alternatives, in order: a leading "delete " keyword; a bare
      // (possibly dot-prefixed) key segment; a bracketed index, slice, or
      // quoted key.
      let re =
        /^(delete)\s+|\.?([^\.\[ =]+)|\[((\-?\d+)(:\-?\d+)?|"(\\"|[^"])*")\]/g;
      let m;
      while ((m = re.exec(json_path))) {
        if (m[1]) ret.delete = true;
        else if (m[2]) ret.path.push(m[2]);
        else if (m[3] && m[5])
          // m[5] begins with ":" — drop it before parsing the number.
          // (slice(1) replaces the deprecated String.prototype.substr.)
          ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].slice(1))];
        else if (m[3]) ret.path.push(JSON.parse(m[3]));
      }
      return ret;
    };

    return self;
  };

  /// # sequence_crdt.create_node(version, elems, [end_cap, sort_key])
  ///
  /// Creates a node for a `sequence_crdt` sequence CRDT with the given properties. The resulting node will look like this:
  ///
  /// ``` js
  /// {
  ///   version, // globally unique string
  ///   elems, // a string or array representing actual data elements of the underlying sequence
  ///   end_cap, // this is useful for dealing with replace operations
  ///   sort_key, // version to pretend this is for the purposes of sorting
  ///   deleted_by : {}, // if this node gets deleted, we'll mark it here
  ///   nexts : [], // array of nodes following this one
  ///   next : null // final node following this one (after all the nexts)
  /// } 
  ///
  /// var sequence_node = sequence_crdt.create_node('alice1', 'hello')
  /// ```
  sequence_crdt.create_node = function (version, elems, end_cap, sort_key) {
    // A brand-new node: nothing deleted yet, and no successors.
    return {
      version,
      sort_key,
      elems,
      end_cap,
      deleted_by: {},
      nexts: [],
      next: null,
    };
  };

  /// # sequence_crdt.generate_braid(root_node, version, is_anc)
  ///  
  /// Reconstructs an array of splice-information which can be passed to `sequence_crdt.add_version` in order to add `version` to another `sequence_crdt` instance – the returned array looks like: `[[insert_pos, delete_count, insert_elems, sort_key, ...], ...]`. `is_anc` is a function which accepts a version string and returns `true` if and only if the given version is an ancestor of `version` (i.e. a version which the author of `version` knew about when they created that version).
  ///
  /// ``` js
  /// var root_node = sequence_crdt.create_node('root', '')
  /// sequence_crdt.add_version(root_node, 'alice1', [[0, 0, 'hello']])
  /// console.log(sequence_crdt.generate_braid(root_node, 'alice1', x => false)) // outputs [[0, 0, "hello", ...]]
  /// ```
  sequence_crdt.generate_braid = (S, version, is_anc, read_array_elements) => {
    if (!read_array_elements) read_array_elements = (x) => x;
    // Accumulated splices; each entry is [offset, del_count, ins, sort_key, kind]
    // where kind is "i" (insert), "r" (replace: insert with end_cap) or "d" (delete).
    var splices = [];

    // Record an insertion at `offset`, merging it into the previous splice
    // when it begins exactly where that splice ends — unless an end_cap or a
    // differing sort_key (for row headers) forbids the merge.
    function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
      if (typeof ins !== "string")
        ins = ins.map((x) => read_array_elements(x, () => false));
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (
          prev[0] + prev[1] === offset &&
          !end_cap &&
          (!is_row_header || prev[3] == sort_key) &&
          (prev[4] === "i" || (prev[4] === "r" && prev[1] === 0))
        ) {
          prev[2] = prev[2].concat(ins);
          return;
        }
      }
      splices.push([offset, 0, ins, sort_key, end_cap ? "r" : "i"]);
    }

    // Record a deletion, merging adjacent deletions into the previous
    // non-insert splice where possible.
    function add_del(offset, del, ins) {
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (prev[0] + prev[1] === offset && prev[4] !== "i") {
          prev[1] += del;
          return;
        }
      }
      splices.push([offset, del, ins, null, "d"]);
    }

    // `offset` counts elements visible to the ancestors of `version`
    // (nodes whose version passes is_anc and that no ancestor deleted).
    var offset = 0;
    function helper(node, _version, end_cap, is_row_header) {
      if (_version === version) {
        // This node was inserted by `version` itself: emit it as an insert.
        add_ins(
          offset,
          node.elems.slice(0),
          node.sort_key,
          end_cap,
          is_row_header
        );
      } else if (node.deleted_by[version] && node.elems.length > 0) {
        // `version` deleted this node's elements: emit a deletion.
        add_del(offset, node.elems.length, node.elems.slice(0, 0));
      }

      if (
        (!_version || is_anc(_version)) &&
        !Object.keys(node.deleted_by).some(is_anc)
      ) {
        offset += node.elems.length;
      }

      // Forked successors carry their own version; chained `next` nodes
      // inherit ours.
      node.nexts.forEach((next) =>
        helper(next, next.version, node.end_cap, true)
      );
      if (node.next) helper(node.next, _version);
    }
    helper(S, null);
    splices.forEach((s) => {
      // if we have replaces with 0 deletes,
      // make them have at least 1 delete..
      // this can happen when there are multiple replaces of the same text,
      // and our code above will associate those deletes with only one of them
      if (s[4] === "r" && s[1] === 0) s[1] = 1;
    });
    return splices;
  };

  /// # sequence_crdt.apply_bubbles(root_node, to_bubble)
  ///
  /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
  /// 
  /// ``` js
  /// sequence_crdt.apply_bubbles(root_node, {
  ///   alice4: ['bob5', 'alice4'],
  ///   bob5: ['bob5', 'alice4']
  /// })
  /// ```
  sequence_crdt.apply_bubbles = (S, to_bubble) => {
    // Pass 1: rename each node's version (and deleted_by entries) to its
    // bubble's "bottom". The old version is kept as sort_key so relative
    // ordering is unaffected. view_deleted=true visits deleted nodes too.
    sequence_crdt.traverse(
      S,
      () => true,
      (node) => {
        if (
          to_bubble[node.version] &&
          to_bubble[node.version][0] != node.version
        ) {
          if (!node.sort_key) node.sort_key = node.version;
          node.version = to_bubble[node.version][0];
        }

        for (var x of Object.keys(node.deleted_by)) {
          if (to_bubble[x]) {
            delete node.deleted_by[x];
            node.deleted_by[to_bubble[x][0]] = true;
          }
        }
      },
      true
    );

    // Append `next` after the last node of `node`'s next-chain.
    function set_nnnext(node, next) {
      while (node.next) node = node.next;
      node.next = next;
    }

    // Pass 2: flatten forks whose versions collapsed into ours, and merge
    // or drop nodes that have become indistinguishable.
    do_line(S, S.version);
    function do_line(node, version) {
      var prev = null;
      while (node) {
        // If the forked successors now share our version, splice them back
        // into the single main chain (first fork first, chained via next).
        if (node.nexts[0] && node.nexts[0].version == version) {
          for (let i = 0; i < node.nexts.length; i++) {
            delete node.nexts[i].version;
            delete node.nexts[i].sort_key;
            set_nnnext(
              node.nexts[i],
              i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next
            );
          }
          node.next = node.nexts[0];
          node.nexts = [];
        }

        // Deleted by our own version: drop the elements and the deletion
        // record entirely, then step back to `prev` so a merge with the
        // now-empty node can be reconsidered.
        if (node.deleted_by[version]) {
          node.elems = node.elems.slice(0, 0);
          node.deleted_by = {};
          if (prev) {
            node = prev;
            continue;
          }
        }

        var next = node.next;

        // Merge `next` into `node` when there is no fork between them and
        // either one is empty or both carry identical deleted_by sets.
        if (
          !node.nexts.length &&
          next &&
          (!node.elems.length ||
            !next.elems.length ||
            (Object.keys(node.deleted_by).every((x) => next.deleted_by[x]) &&
              Object.keys(next.deleted_by).every((x) => node.deleted_by[x])))
        ) {
          if (!node.elems.length) node.deleted_by = next.deleted_by;
          node.elems = node.elems.concat(next.elems);
          node.end_cap = next.end_cap;
          node.nexts = next.nexts;
          node.next = next.next;
          continue;
        }

        // Drop an empty successor that has no forks of its own.
        if (next && !next.elems.length && !next.nexts.length) {
          node.next = next.next;
          continue;
        }

        // Recurse into each remaining fork, then advance along the chain.
        for (let n of node.nexts) do_line(n, n.version);

        prev = node;
        node = next;
      }
    }
  };

  /// # sequence_crdt.get(root_node, i, is_anc)
  /// 
  /// Returns the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// var x = sequence_crdt.get(root_node, 2, {
  ///     alice1: true
  /// })
  /// ```
  sequence_crdt.get = (S, i, is_anc) => {
    var found = null;
    var pos = 0;
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      var rel = i - pos;
      if (rel < node.elems.length) {
        // Target element lives in this node; grab it and abort traversal.
        found = node.elems[rel];
        return false;
      }
      pos += node.elems.length;
    });
    return found;
  };

  /// # sequence_crdt.update(root_node, i, v, is_anc)
  /// 
  /// Sets the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node` to the value `v`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// sequence_crdt.update(root_node, 2, 'x', {
  ///   alice1: true
  /// })
  /// ```
  sequence_crdt.update = (S, i, v, is_anc) => {
    var pos = 0;
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      var rel = i - pos;
      if (rel < node.elems.length) {
        if (typeof node.elems == "string") {
          // Strings are immutable: rebuild with the one element swapped.
          node.elems = node.elems.slice(0, rel) + v + node.elems.slice(rel + 1);
        } else {
          node.elems[rel] = v;
        }
        // Done — abort the traversal.
        return false;
      }
      pos += node.elems.length;
    });
  };

  /// # sequence_crdt.length(root_node, is_anc)
  /// 
  /// Returns the length of the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// console.log(sequence_crdt.length(root_node, {
  ///  alice1: true
  /// }))
  /// ```
  sequence_crdt.length = (S, is_anc) => {
    // Sum the element counts of every visible (non-deleted) node.
    var total = 0;
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      total += node.elems.length;
    });
    return total;
  };

  /// # sequence_crdt.break_node(node, break_position, end_cap, new_next)
  /// 
  /// This method breaks apart a `sequence_crdt` node into two nodes, each representing a subsequence of the sequence represented by the original node. The `node` parameter is modified into the first node, and the second node is returned. The first node represents the elements of the sequence before `break_position`, and the second node represents the rest of the elements. If `end_cap` is truthy, then the first node will have `end_cap` set – this is generally done if the elements in the second node are being replaced. This method will add `new_next` to the first node's `nexts` array.
  /// 
  /// ``` js
  /// var node = sequence_crdt.create_node('alice1', 'hello') // node.elems == 'hello'
  /// var second = sequence_crdt.break_node(node, 2) // now node.elems == 'he', and second.elems == 'llo'
  /// ```
  sequence_crdt.break_node = (node, x, end_cap, new_next) => {
    // The tail node has no version of its own; it inherits the original's
    // trailing elements, end_cap, deletion record, and both successor links.
    var tail = sequence_crdt.create_node(
      null,
      node.elems.slice(x),
      node.end_cap
    );
    Object.assign(tail.deleted_by, node.deleted_by);
    Object.assign(tail, { nexts: node.nexts, next: node.next });

    // The original node keeps the head elements and now leads to the tail,
    // optionally forking to `new_next` first.
    Object.assign(node, {
      elems: node.elems.slice(0, x),
      end_cap,
      nexts: new_next ? [new_next] : [],
      next: tail,
    });

    return tail;
  };

  /// # sequence_crdt.add_version(root_node, version, splices, [is_anc])
  /// 
  /// This is the main method in sequence_crdt, used to modify the sequence. The modification must be given a unique `version` string, and the modification itself is represented as an array of `splices`, where each splice looks like this: `[position, num_elements_to_delete, elements_to_insert, optional_sort_key]`. 
  /// 
  /// Note that all positions are relative to the original sequence, before any splices have been applied. Positions are counted by only considering nodes with versions which result in `true` when passed to `is_anc`. (and are not `deleted_by` any versions which return `true` when passed to `is_anc`).
  /// 
  /// ``` js
  /// var node = sequence_crdt.create_node('alice1', 'hello') 
  /// sequence_crdt.add_version(node, 'alice2', [[5, 0, ' world']], v => v == 'alice1') 
  /// ```
  sequence_crdt.add_version = (S, version, splices, is_anc) => {
    // Splices re-expressed against the full (unfiltered) sequence, built up
    // during the traversal below and returned to the caller.
    var rebased_splices = [];

    // Insert `to` into `nexts` keeping it ordered by sort_key (falling back
    // to version), so concurrent siblings sort the same way on every peer.
    function add_to_nexts(nexts, to) {
      var i = binarySearch(nexts, function (x) {
        if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1;
        if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1;
        return 0;
      });
      nexts.splice(i, 0, to);
    }

    var si = 0; // index of the splice currently being applied
    var delete_up_to = 0; // offset through which the current deletion extends

    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {
      var s = splices[si];
      if (!s) return;
      var sort_key = s[3];

      if (deleted) {
        // Node is deleted according to is_anc; a pure insertion landing at
        // its start position may still need to attach here.
        if (s[1] == 0 && s[0] == offset) {
          if (node.elems.length == 0 && !node.end_cap && has_nexts) return;
          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          if (node.elems.length == 0 && !node.end_cap)
            add_to_nexts(node.nexts, new_node);
          else sequence_crdt.break_node(node, 0, undefined, new_node);
          si++;
        }

        // A replacement that should hang its insertion off this (already
        // replaced) node rather than before the live text that follows.
        if (
          delete_up_to <= offset &&
          s[1] &&
          s[2] &&
          s[0] == offset &&
          node.end_cap &&
          !has_nexts &&
          (node.next && node.next.elems.length) &&
          !Object.keys(node.next.deleted_by).some((version) => f(version))
        ) {
          delete_up_to = s[0] + s[1];

          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          add_to_nexts(node.nexts, new_node);
        }

        return;
      }

      // Pure insertion (no elements deleted).
      if (s[1] == 0) {
        var d = s[0] - (offset + node.elems.length);
        if (d > 0) return; // insertion point lies beyond this node
        if (d == 0 && !node.end_cap && has_nexts) return; // attach later
        var new_node = sequence_crdt.create_node(version, s[2], null, sort_key);

        fresh_nodes.add(new_node);

        if (d == 0 && !node.end_cap) {
          add_to_nexts(node.nexts, new_node);
        } else {
          sequence_crdt.break_node(node, s[0] - offset, undefined, new_node);
        }
        si++;
        return;
      }

      // Start of a deletion (a replacement if s[2] is non-empty).
      if (delete_up_to <= offset) {
        var d = s[0] - (offset + node.elems.length);

        let add_at_end =
          d == 0 &&
          s[2] &&
          node.end_cap &&
          !has_nexts &&
          (node.next && node.next.elems.length) &&
          !Object.keys(node.next.deleted_by).some((version) => f(version));

        if (d > 0 || (d == 0 && !add_at_end)) return;

        delete_up_to = s[0] + s[1];

        if (s[2]) {
          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          if (add_at_end) {
            add_to_nexts(node.nexts, new_node);
          } else {
            sequence_crdt.break_node(node, s[0] - offset, true, new_node);
          }
          return;
        } else {
          if (s[0] == offset) {
            // Deletion starts exactly at this node's start: fall through to
            // the delete_up_to sweep below.
          } else {
            sequence_crdt.break_node(node, s[0] - offset);
            return;
          }
        }
      }

      // Sweep: mark whole nodes as deleted until delete_up_to is reached,
      // splitting the final node if the deletion ends inside it.
      if (delete_up_to > offset) {
        if (delete_up_to <= offset + node.elems.length) {
          if (delete_up_to < offset + node.elems.length) {
            sequence_crdt.break_node(node, delete_up_to - offset);
          }
          si++; // this splice is now fully applied
        }
        node.deleted_by[version] = true;
        return;
      }
    };

    var f = is_anc || (() => true);
    var offset = 0; // position counting only nodes visible per is_anc
    var rebase_offset = 0; // position counting all non-deleted nodes
    let fresh_nodes = new Set(); // nodes created by this call (for rebasing)
    function traverse(node, prev, version) {
      if (!version || f(version)) {
        var has_nexts = node.nexts.find((next) => f(next.version));
        var deleted = Object.keys(node.deleted_by).some((version) =>
          f(version)
        );
        let rebase_deleted = Object.keys(node.deleted_by).length;
        process_patch(node, offset, has_nexts, prev, version, deleted);

        if (!deleted) offset += node.elems.length;
        // Node had no deletions before process_patch but does now: this
        // version just deleted it, so record a rebased deletion.
        if (!rebase_deleted && Object.keys(node.deleted_by).length)
          rebased_splices.push([rebase_offset, node.elems.length, ""]);
      }
      // Nodes created by this call become rebased insertions.
      if (fresh_nodes.has(node))
        rebased_splices.push([rebase_offset, 0, node.elems]);
      if (!Object.keys(node.deleted_by).length)
        rebase_offset += node.elems.length;

      for (var next of node.nexts) traverse(next, null, next.version);
      if (node.next) traverse(node.next, node, version);
    }
    traverse(S, null, S.version);

    return rebased_splices;
  };

  /// # sequence_crdt.traverse(root_node, is_anc, callback, [view_deleted, tail_callback])
  /// 
  /// Traverses the subset of nodes in the tree rooted at `root_node` whose versions return `true` when passed to `is_anc`. For each node, `callback` is called with these parameters: `node, offset, has_nexts, prev, version, deleted`, 
  /// 
  /// Where
  /// - `node` is the current node being traversed
  /// - `offset` says how many elements we have passed so far 
  /// - `has_nexts` is true if some of this node's `nexts` will be traversed according to `is_anc`
  /// - `prev` is a pointer to the node whos `next` points to this one, or `null` if this is the root node
  /// - `version` is the version of this node, or this node's `prev` if our version is `null`, or that node's `prev` if it is also `null`, etc
  /// - `deleted` is true if this node is deleted according to `is_anc`
  /// 
  /// Usually we skip deleted nodes when traversing, but we'll include them if `view_deleted` is `true`. 
  /// 
  /// `tail_callback` is an optional callback that will get called with a single parameter `node` after all of that node's children `nexts` and `next` have been traversed. 
  /// 
  /// ``` js
  /// sequence_crdt.traverse(node, () => true, node =>
  ///   process.stdout.write(node.elems)) 
  /// ```
  sequence_crdt.traverse = (S, f, cb, view_deleted, tail_cb) => {
    var offset = 0;
    // helper returns true to signal "abort the whole traversal" upward.
    function helper(node, prev, version) {
      var has_nexts = node.nexts.find((next) => f(next.version));
      var deleted = Object.keys(node.deleted_by).some((version) => f(version));
      if (view_deleted || !deleted) {
        // A callback returning exactly false stops the traversal early.
        if (cb(node, offset, has_nexts, prev, version, deleted) == false)
          return true;
        offset += node.elems.length;
      }
      // Visit forks first (only those f approves), then the main chain.
      for (var next of node.nexts)
        if (f(next.version)) {
          if (helper(next, null, next.version)) return true;
        }
      if (node.next) {
        // `next` nodes carry no version of their own: they inherit ours.
        if (helper(node.next, node, version)) return true;
      } else if (tail_cb) tail_cb(node);
    }
    helper(S, null, S.version);
  };

  // modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript
  // Returns the index of an element for which compare_fn returns 0, or,
  // when none compares equal, the index at which the target would be
  // inserted to keep the array ordered. compare_fn(x) returns a negative
  // number if the target sorts before x, positive if after, 0 if equal.
  function binarySearch(ar, compare_fn) {
    var lo = 0;
    var hi = ar.length - 1;
    while (lo <= hi) {
      var mid = (lo + hi) >> 1;
      var cmp = compare_fn(ar[mid]);
      if (cmp > 0) lo = mid + 1;
      else if (cmp < 0) hi = mid - 1;
      else return mid;
    }
    return lo;
  }
})();

if (typeof module != "undefined")
  module.exports = {
    create_antimatter_crdt,
    create_json_crdt,
    sequence_crdt,
  };


================================================
FILE: antimatter/doc.html
================================================
<head>
<link rel="stylesheet" href="https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css">
</head>

<script src="https://unpkg.com/marked@4.0.5"></script>

<script src="https://unpkg.com/@highlightjs/cdn-assets@11.1.0/highlight.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.1.0/languages/javascript.min.js"></script>

<body></body>
<script>

// Builds the side-by-side documentation page: fetches the published source
// and readme, splits each into parallel blocks, and renders doc beside code.
;(async () => {
    // Fetch the library source and its readme from the published site.
    let x = await fetch(`https://braid-org.github.io/braidjs/antimatter/antimatter.js`)
    x = await x.text()
    let code = x

    x = await fetch(`https://braid-org.github.io/braidjs/antimatter/readme.md`)
    x = await x.text()
    let md = x

    // Split the source into blocks at each top-level method assignment or
    // message-type handler (the replace callback only collects offsets).
    let code_blocks = []
    if (true) {
        let agg = code_blocks = []
        let prev_o = 0
        code.replace(/^[\t ]*(?:antimatter|self|json|sequence)\.(?:.*?) = (?:.*?)=> \(?\{\n|^[\t ]*(?:\} else )?if \(type == (?:.*?)\) \{\n/gm, (_0, o) => {
            agg.push(code.slice(prev_o, o))
            prev_o = o
        })
        agg.push(code.slice(prev_o))
    }
    code_blocks = code_blocks.filter(x => x)

    // Split the readme into blocks at each API/message heading.
    let md_blocks = []
    if (true) {
        let agg = md_blocks = []
        let prev_o = 0
        md.replace(/^(?:# antimatter|# json|# sequence|## message)/gm, (_0, o) => {
            agg.push(md.slice(prev_o, o))
            prev_o = o
        })
        agg.push(md.slice(prev_o))
    }
    md_blocks = md_blocks.filter(x => x)

    // Render a markdown block into a half-width div.
    function make_md(s) {
        let d = make_html(`<div style="background:hsl(${Math.random() * 360}, 100%, 100%);width:50%"></div>`)
        d.innerHTML = marked.parse(s)
        return d
    }

    // Render a code block into a half-width, syntax-highlighted <pre>.
    function make_code(s) {
        let vv = hljs.highlight(s, {language: 'javascript'}).value
        let d = make_html(`<pre style="margin:0px;background:hsl(${Math.random() * 360}, 100%, 100%);width:50%">${vv}</pre>`)
        return d
    }

    // Lay out docs (left) and code (right) pair by pair.
    // NOTE(review): assumes md_blocks and code_blocks line up 1:1 — verify
    // when either the readme or the source structure changes.
    while (md_blocks.length) {
        let left = md_blocks.shift()
        let right = code_blocks.shift()

        let d = make_html(`<div style="border-top:1px solid black;display:flex;align-items: start;"></div>`)
        d.append(make_md(left))
        d.append(make_code(right))
        document.body.append(d)
    }
})()

// Parses an HTML string by letting the browser build it inside a throwaway
// <div>, then returns the first parsed node.
function make_html(s) {
    let wrapper = document.createElement('div')
    wrapper.innerHTML = s
    return wrapper.firstChild
}

</script>


================================================
FILE: antimatter/package.json
================================================
{
  "name": "@braidjs/antimatter",
  "version": "0.0.34",
  "description": "antimatter: a pruning algorithm for CRDTs and other mergeables",
  "main": "antimatter.js",
  "scripts": {
    "test": "node test.js"
  },
  "author": "Braid Working Group",
  "repository": "braid-org/braidjs",
  "homepage": "https://braid.org/antimatter"
}


================================================
FILE: antimatter/readme.md
================================================
# MOVED TO https://github.com/braid-org/antimatter

--

# antimatter: an algorithm that prunes CRDT/OT history

[Antimatter](https://braid.org/antimatter) is the world's first peer-to-peer synchronization algorithm that can prune its history in a network where peers disconnect, reconnect, and merge offline edits.  Antimatter supports arbitrary simultaneous edits, from arbitrary peers, under arbitrary network delays and partitions, and guarantees full CRDT/OT consistency, while pruning unnecessary history within each partitioned subnet, and across subnets once they reconnect.  In steady state, it prunes down to zero overhead.  This lets you put synchronizing data structures in more parts of your software, without worrying about memory overhead.

This package implements an antimatter peer composed of three objects:

```js
var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter')
```

- *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.
- *json_crdt*: created using `create_json_crdt`, this object is a pruneable JSON CRDT — "JSON" meaning it represents an arbitrary JSON data structure, and "CRDT" and "pruneable" having the same meaning as for sequence_crdt below. The json_crdt makes recursive use of sequence_crdt structures to represent arbitrary JSON (for instance, a map is represented with a sequence_crdt structure for each value, where the first element in the sequence is the value).
- *sequence_crdt*: methods to manipulate a pruneable sequence CRDT — "sequence" meaning it represents a javascript string or array, "CRDT" meaning this structure can be merged with other ones, and "pruneable" meaning that it supports an operation to remove meta-data when it is no longer needed (whereas CRDT's often keep track of this meta-data forever).

The Antimatter Algorithm was invented by Michael Toomim and Greg Little in the
[Braid Project](https://braid.org) of [Invisible College](https://invisible.college/).

[Click here to see more details, and the API side-by-side with the source code.](https://braid.org/antimatter)


================================================
FILE: antimatter/test.html
================================================
<body></body>
<script>

// Keep a handle on the original RNG — colors should stay random even if a
// seeded replacement (presumably random002.js) overrides Math.random later.
let real_random = Math.random

// Renders one log entry as a colored row in the page; string arguments
// become plain cells, anything else is pretty-printed as JSON. Rows whose
// first argument starts with 'i = ' get a darker background.
function print(...args) {
    let row = document.createElement('div')
    let hue = real_random() * 360
    row.style.background = `hsl(${hue},100%,${args[0]?.startsWith?.('i = ') ? 85 : 95}%)`
    row.style.border = `3px solid hsl(${hue},100%,85%)`
    row.style.display = 'grid'
    row.style['grid-template-columns'] = '1fr '.repeat(args.length)
    for (let arg of args) {
        let cell
        if (typeof arg == 'string') {
            cell = document.createElement('div')
            cell.textContent = arg
        } else {
            cell = document.createElement('pre')
            cell.style.fontSize = '50%'
            cell.textContent = JSON.stringify(arg, null, '    ')
        }
        row.append(cell)
    }
    document.body.append(row)
}

// Route all console.log output into the page itself.
console.log = print

</script>
<script src="antimatter.js"></script>
<script src="https://dglittle.github.io/cdn/random002.js"></script>

<script>

// Randomized fuzz-test driver: runs many seeded simulations of a small
// cluster of antimatter peers making edits, connecting, disconnecting, and
// exchanging messages, then checks that all peers converge with history
// fully pruned.
;(async () => {

    // Track the failing seed with the fewest steps, to ease debugging.
    let best_seed = null
    let best_n = Infinity
    let last_n

    for (let i = 0; i < 100; i++) {
        let seed = 'BASE_' + i
        let r = run_test(seed, false)
        if (!r) {
            console.log(`seed "${seed}" FAILED after ${last_n} steps`)
            if (last_n < best_n) {
                best_n = last_n
                best_seed = seed
            }
        } else {
            console.log(`seed "${seed}" ${r === true ? 'passed' : 'cancelled'} after ${last_n} steps!`)
        }

        if (best_seed != null) {
            console.log(`    (smallest failed seed: "${best_seed}", after ${best_n} steps)`)
        }

        // Yield briefly so output renders incrementally and scrolls down.
        await new Promise(done => setTimeout(done, 10))
        document.body.scrollTop = document.body.scrollHeight
    }
    if (best_seed == null) console.log(`ALL PASSED!`)
    document.body.scrollTop = document.body.scrollHeight

    // Runs one simulation for the given seed. Returns true on success,
    // false on failure, or a string when cancelled early because this run
    // already exceeded the smallest known failing run.
    function run_test(seed, verbose) {
        try {
        Math.randomSeed(seed)

        let num_peers = Math.floor(Math.random() * 5) + 1
        let steps = Math.floor(Math.random() * 200)

        last_n = 0

        // conns maps conn-id -> {peer_id: {other: peer_id, q: [messages]}};
        // each side of a connection has its own inbound queue.
        let peers = []
        let conns = {}
        let next_conn_id = 0

        for (let i = 0; i < num_peers; i++) {
            peers.push(create_antimatter_crdt(msg => {
                let c = conns[msg.conn]

                if (c?.[i]?.other == null) {
                    debugger
                }

                if (verbose) console.log(`    send p${i}->p${c?.[i]?.other}(conn:${msg.conn}) msg:${msg.type}`)

                // Deliver by queueing on the far end of the connection.
                c?.[c?.[i]?.other]?.q.push(msg)
            }, () => 123, () => 1, () => {}, {id: i}))
        }

        peers[0].update({range: '', content: ''})

        // Main random phase: each step is an edit, a connect/disconnect,
        // or a single message delivery.
        for (let i = 0; i < steps; i++) {
            if (verbose) console.log(`i = ${i}`)

            last_n++
            if (last_n > best_n) return "we've seen better"

            if (Math.random() < 1/3) {
                // edit

                let can_do = peers.filter(p => p.read() != null || p.id == 0)
                let p = can_do[Math.floor(Math.random() * can_do.length)]
                let text = p.read()

                let start = Math.round(Math.random() * text.length)
                let end = start + Math.round(Math.random() * (text.length - start))
                let content = String.fromCharCode('a'.charCodeAt(0) + Math.floor(Math.random() * 26)).repeat(Math.floor(Math.random() * 4))

                if (verbose) console.log(`edit p${p.id} [${start}:${end}]=${content}`)

                let v = p.update({range: `[${start}:${end}]`, content})
            } else if (Math.random() < 0.5) {
                if (Math.random() < 0.5) {
                    // connect

                    if (peers.length > 1) {
                        let p1 = peers[Math.floor(Math.random() * peers.length)]
                        let p2 = p1
                        while (p2 == p1) p2 = peers[Math.floor(Math.random() * peers.length)]

                        let conn = next_conn_id++
                        conns[conn] = {
                            [p1.id]: {other: p2.id, q: []},
                            [p2.id]: {other: p1.id, q: []}
                        }

                        if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)

                        p1.subscribe(conn)
                    }
                } else {
                    // disconnect

                    let conn_keys = Object.keys(conns)
                    if (conn_keys.length) {
                        let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]
                        let c = conns[conn]
                        let peer_keys = Object.keys(c)
                        let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]
                        let other = c[p.id].other

                        // Drop this side; remove the whole conn once both
                        // sides are gone.
                        if (peer_keys.length == 1) delete conns[conn]
                        else delete c[p.id]

                        if (verbose) console.log(`diss p${p.id} (conn:${conn}, ${other})`)

                        if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)
                    }
                }
            } else {
                // message pump

                let conn_keys = Object.keys(conns)
                if (conn_keys.length) {
                    let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]
                    let c = conns[conn]
                    let peer_keys = Object.keys(c)
                    let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]

                    let msg = c[p.id].q.shift()

                    if (msg) {
                        if (verbose) console.log(`recv p${p.id} (conn:${conn}) msg:${msg.type} :: ${JSON.stringify(msg)}`)

                        p.receive(msg)
                    }
                }            
            }

            if (verbose) console.log(...peers.map(p => ({T: p.T, f: p.fissures})))
        }

        // Tear down half-open connections left over from the random phase.
        if (verbose) console.log(`----clean conns----`)
        for (let [conn, c] of Object.entries(conns)) {
            let peer_keys = Object.keys(c)
            if (peer_keys.length < 2) {
                let p = peers[peer_keys[0]]
                let other = c[p.id].other

                delete conns[conn]

                if (verbose) console.log(`diss p${p.id} (conn:${conn})`)

                if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)
            }
        }

        // Connect every peer into one component so they can all converge.
        if (verbose) console.log(`----conn all----`)
        for (let i = 1; i < peers.length; i++) {
            let p1 = peers[i]
            let p2 = peers[Math.floor(Math.random() * i)]

            let conn = next_conn_id++
            conns[conn] = {
                [p1.id]: {other: p2.id, q: []},
                [p2.id]: {other: p1.id, q: []}
            }

            if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)

            p1.subscribe(conn)
        }

        // Delivers queued messages in random order until every queue is
        // empty (with a safety cap to catch message loops).
        function pump_all() {
            if (verbose) console.log(`----pump all----`)
            for (let i = 20000; i >= 0; i--) {
                if (i == 0) {
                    console.log(`safety limit exceeded!`)
                    throw 'bad'
                }
                if (verbose) console.log(`i = ${i}`)

                last_n++

                let options = []

                for (let [conn, c] of Object.entries(conns)) {
                    for (let [pk, pp] of Object.entries(c)) {
                        if (pp.q.length) {
                            options.push(() => {
                                let p = peers[pk]
                                let msg = pp.q.shift()

                                if (verbose) {
                                    console.log(`recv p${p.id} (conn:${conn}, ${conns[conn][p.id].other}) msg:${msg.type}, ${JSON.stringify(msg)}`)
                                }

                                p.receive(msg)

                                if (verbose) {
                                    console.log(...peers.map(p => ({T: p.T, f: p.fissures})))
                                }
                            })
                        }
                    }
                }

                if (options.length) {
                    options[Math.floor(Math.random() * options.length)]()
                } else break
            }
        }

        pump_all()

        // Re-broadcast every peer's fissures so stragglers can heal.
        if (verbose) console.log(`----resend fissures----`)
        for (let p of peers) {
            if (verbose) console.log(`p${p.id} sending fissures`)

            for (let c of Object.keys(p.conns)) p.send({type: 'welcome', versions: [], fissures: Object.values(p.fissures), conn: c})
        }

        pump_all()

        // One more edit after everything has settled (labelled "joiner").
        if (verbose) console.log(`----joiner----`)

        peers[0].update({range: '[0:0]', content: '_'})

        pump_all()

        // Success criteria: every peer agrees on the final text, history is
        // pruned down to a single version, and no fissures remain.
        let final_text = peers[0].S
        if (typeof final_text != 'string') {
            console.log('final not a string: ', final_text)
            return false
        }
        for (let p of peers) {
            if (p.S != final_text) {
                console.log(`peer not in line (we want ${final_text}): `, p)
                return false
            }
            if (Object.keys(p.T).length != 1) {
                console.log('peer has big T: ', p)
                return false
            }
            if (Object.keys(p.fissures).length != 0) {
                console.log('peer has fissures: ', p)
                return false
            }
        }

        return true
        } catch (e) {
            console.log(`E: ${e}`, e.stack)
            return false
        }
    }

})()

</script>


================================================
FILE: antimatter_ts/antimatter.js
================================================
/// # Software Architecture
/// The software is architected into three objects:
///
/// ``` js
/// var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') 
/// ```

// v522

/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.
var create_antimatter_crdt;

/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable
///   JSON CRDT — "JSON" meaning it represents an arbitrary JSON data structure, and
///   "CRDT" and "pruneable" having the same meaning as for sequence_crdt below. The
///   json_crdt makes recursive use of sequence_crdt structures to represent
///   arbitrary JSON (for instance, a map is represented with a sequence_crdt
///   structure for each value, where the first element in the sequence is the
///   value).
var create_json_crdt;

/// - *sequence_crdt*: methods to manipulate a pruneable sequence CRDT —
///   "sequence" meaning it represents a javascript string or array, "CRDT" meaning
///   this structure can be merged with other ones, and "pruneable" meaning that it
///   supports an operation to remove meta-data when it is no longer needed (whereas
///   CRDT's often keep track of this meta-data forever).
var sequence_crdt = {};

(() => {
  /// # create_antimatter_crdt(send, get_time, set_timeout, clear_timeout[, init])
  ///
  /// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).
  ///
  /// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a
  ///   message over a connection registered with `get` or `connect`. The sole
  ///   parameter to this function is a JSONafiable object that hopes to be passed to
  ///   the `receive` method on the antimatter_crdt object at the other end of the
  ///   connection specified in the `conn` key.
  /// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)
  /// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)
  /// * `clear_timeout`: function that takes a timeout identifier and cancels it (e.g. wrapping the javascript clearTimeout)
  /// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. 
  /// ``` js
  /// var antimatter_crdt = create_antimatter_crdt(msg => {
  ///     websockets[msg.conn].send(JSON.stringify(msg))
  ///   },
  ///   () => Date.now(),
  ///   (func, t) => setTimeout(func, t),
  ///   (t) => clearTimeout(t),
  ///   JSON.parse(fs.readFileSync('./antimatter.backup'))
  /// )
  /// ```
  create_antimatter_crdt = (
    send,
    get_time,
    set_timeout,
    clear_timeout,
    self
  ) => {
    self = create_json_crdt(self);
    self.send = send;

    self.id = self.id || Math.random().toString(36).slice(2);
    self.next_seq = self.next_seq || 0;

    self.conns = self.conns || {};
    self.proto_conns = self.proto_conns || {};
    self.conn_count = self.conn_count || 0;

    self.fissures = self.fissures || {};
    self.acked_boundary = self.acked_boundary || {};
    self.marcos = self.marcos || {};
    self.forget_cbs = self.forget_cbs || {};

    self.version_groups = self.version_groups || {};

    self.marco_map = self.marco_map || {};
    self.marco_time_est_1 = self.marco_time_est_1 || 1000;
    self.marco_time_est_2 = self.marco_time_est_2 || 1000;
    self.marco_current_wait_time = self.marco_current_wait_time || 1000;
    self.marco_increases_allowed = 1;
    self.marco_timeout = self.marco_timeout || null;

    // Merge the given version ids (together with any groups they already
    // belong to) into one sorted version-group, register every member of the
    // result in self.version_groups, and return the new group array.
    function raw_add_version_group(version_array) {
      let members = {};
      version_array.forEach((v) => {
        if (members[v]) return;
        members[v] = true;
        let existing = self.version_groups[v];
        if (existing) for (let vv of existing) members[vv] = true;
      });
      let group = Object.keys(members).sort();
      for (let v of group) self.version_groups[v] = group;
      return group;
    }

    // Find collapsible "junctions" in the version DAG: groups of siblings
    // where every member of a child set has exactly the same parent set, and
    // every member of that parent set has exactly the same child set (a
    // complete bipartite junction). `children` is the child map produced by
    // self.get_child_map(). Returns {parent_sets, child_sets}, each mapping a
    // version id to a shared container {members} describing its set.
    function get_parent_and_child_sets(children) {
      let parent_sets = {};
      let child_sets = {};
      let done = {};
      // Register set `s` under each of its members in `sets`; singleton sets
      // are skipped since there is nothing to merge.
      function add_set_to_sets(s, sets, mark_done) {
        let container = { members: s };
        let array = Object.keys(s);
        if (array.length < 2) return;
        for (let v of array) {
          sets[v] = container;
          if (mark_done) done[v] = true;
        }
      }
      // The current frontier always counts as a parent set.
      add_set_to_sets(self.current_version, parent_sets, true);
      for (let v of Object.keys(self.T)) {
        if (done[v]) continue;
        done[v] = true;
        if (!children[v]) continue;
        let first_child_set = children[v];
        let first_child_array = Object.keys(first_child_set);
        let first_parent_set = self.T[first_child_array[0]];
        let first_parent_array = Object.keys(first_parent_set);
        // Check completeness in both directions: all children share the same
        // parents, and all parents share the same children.
        if (
          first_child_array.every((child) => {
            let parent_set = self.T[child];
            let parent_array = Object.keys(parent_set);
            return (
              parent_array.length == first_parent_array.length &&
              parent_array.every((parent) => first_parent_set[parent])
            );
          }) &&
          first_parent_array.every((parent) => {
            let child_set = children[parent];
            let child_array = Object.keys(child_set);
            return (
              child_array.length == first_child_array.length &&
              child_array.every((child) => first_child_set[child])
            );
          })
        ) {
          add_set_to_sets(first_parent_set, parent_sets, true);
          add_set_to_sets(first_child_set, child_sets);
        }
      }
      return { parent_sets, child_sets };
    }

    // Walk upward (toward ancestors) from the `bottom` set of versions
    // looking for a "bubble" top: a frontier (a single version, or one of the
    // precomputed child_sets) that dominates everything in `bottom`, so the
    // span between top and bottom can later be collapsed into one version.
    // `children` is the child map; `child_sets` comes from
    // get_parent_and_child_sets. If `restricted` is provided, versions in it
    // act as barriers and the best frontier found so far (`last_top`) is
    // returned on hitting one. Returns a set {version: true, ...} or null.
    function find_one_bubble(bottom, children, child_sets, restricted) {
      let expecting = { ...bottom };
      let seen = {};
      // Seed `seen` with the children of the bottom versions so the walk
      // treats the bottom itself as already covered from below.
      Object.keys(bottom).forEach(
        (v) =>
          children[v] &&
          Object.keys(children[v]).forEach((v) => (seen[v] = true))
      );
      let q = Object.keys(expecting);
      let last_top = null;
      while (q.length) {
        // Bug fix: `cur` was assigned without a declaration, creating an
        // implicit global (a ReferenceError in strict/module code).
        let cur = q.shift();
        if (!self.T[cur]) {
          if (!restricted) throw "bad";
          else return last_top;
        }
        if (restricted && restricted[cur]) return last_top;

        if (seen[cur]) continue;

        // Don't ascend through a version until all of its children are seen.
        if (children[cur] && !Object.keys(children[cur]).every((c) => seen[c]))
          continue;
        seen[cur] = true;
        delete expecting[cur];

        if (!Object.keys(expecting).length) {
          last_top = { [cur]: true };
          if (!restricted) return last_top;
        }

        Object.keys(self.T[cur]).forEach((p) => {
          expecting[p] = true;
          q.push(p);
        });

        // A fully-seen child set whose shared parents are exactly what we're
        // still expecting also forms a valid bubble top.
        if (
          child_sets[cur] &&
          Object.keys(child_sets[cur].members).every((v) => seen[v])
        ) {
          let expecting_array = Object.keys(expecting);
          let parent_set = self.T[cur];
          let parent_array = Object.keys(parent_set);
          if (
            expecting_array.length == parent_array.length &&
            expecting_array.every((v) => parent_set[v])
          ) {
            last_top = child_sets[cur].members;
            if (!restricted) return last_top;
          }
        }
      }
      return last_top;
    }

    // Register `version_array` as a group of equivalent version ids. If none
    // of them exist in the DAG yet, just record the group; otherwise collapse
    // the members already in the DAG (and everything between them and the
    // bubble top dominating them) into one version via apply_bubbles.
    // Returns the canonical (first, sorted) id of the group.
    function add_version_group(version_array) {
      let version_group = raw_add_version_group(version_array);
      if (!version_array.some((x) => self.T[x])) return version_group[0];

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      let to_bubble = {};
      // Assign `bubble` to `v` and all its ancestors (first writer wins).
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }

      // Only group members already present in the DAG form the bubble bottom.
      let bottom = Object.fromEntries(
        version_group.filter((x) => self.T[x]).map((x) => [x, true])
      );
      let top = find_one_bubble(bottom, children, child_sets);
      let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];
      for (let v of Object.keys(top)) to_bubble[v] = bubble;
      for (let v of Object.keys(bottom)) mark_bubble(v, bubble);

      self.apply_bubbles(to_bubble);
      return version_group[0];
    }

    // Wrap the raw `send` callback: before a message goes out, expand any
    // version id that belongs to a version group into the full group array,
    // and add every group member to any parents maps, so that peers who know
    // a different member of the same group can still line things up.
    let orig_send = send;
    send = (x) => {
      if (self.version_groups[x.version])
        x.version = self.version_groups[x.version];
      if (x.parents) {
        x.parents = { ...x.parents };
        Object.keys(x.parents).forEach((v) =>
          self.version_groups[v] && self.version_groups[v].forEach((v) => (x.parents[v] = true))
        );
      }
      if (Array.isArray(x.versions)) {
        // Deep-copy so we don't mutate the caller's version objects.
        x.versions = JSON.parse(JSON.stringify(x.versions));
        x.versions.forEach(
          (v) =>
            self.version_groups[v.version] &&
            (v.version = self.version_groups[v.version])
        );
        x.versions.forEach((v) => {
          Object.keys(v.parents).forEach((vv) =>
            self.version_groups[vv] && self.version_groups[vv].forEach((vv) => (v.parents[vv] = true))
          );
        });
      }

      orig_send(x);
    };

    /// # antimatter_crdt.receive(message)
    ///
    /// Let this antimatter object "receive" a message from another antimatter object, presumably from its `send` callback.
    /// ``` js
    /// websocket.on('message', data => {
    ///     antimatter_crdt.receive(JSON.parse(data)) });
    /// ```
    /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `cmd` entry. Note that each object also
    ///   contains a `conn` entry with the id of the connection the message is sent
    ///   over.
    self.receive = (x) => {
      let {
        cmd,
        version,
        parents,
        patches,
        versions,
        fissure,
        fissures,
        seen,
        forget,
        marco,
        peer,
        conn,
      } = x;

      // Normalize a grouped version id (an array of equivalent ids) down to
      // its canonical single id, registering the group if it is new to us.
      if (version && typeof version != "string") {
        if (!self.T[version[0]]) version = add_version_group(version);
        else version = version[0];
      }
      // Drop non-canonical group members from the parents map.
      if (parents) {
        parents = { ...parents };
        Object.keys(parents).forEach((v) => {
          if (self.version_groups[v] && self.version_groups[v][0] != v)
            delete parents[v];
        });
      }

      // Same normalization for each entry of an array-shaped `versions`
      // (the shape used by set/welcome messages).
      if (versions && versions.forEach) versions.forEach((v) => {
        if (typeof v.version != "string") {
          if (!self.T[v.version[0]]) v.version = add_version_group(v.version);
          else v.version = v.version[0];
        }
        v.parents = { ...v.parents };
        Object.keys(v.parents).forEach((vv) => {
          if (self.version_groups[vv] && self.version_groups[vv][0] != vv)
            delete v.parents[vv];
        });
      });

      // The set of versions this message is "about", used by the marco
      // bookkeeping below.
      let marco_versions_array = version
        ? [version]
        : versions && !Array.isArray(versions)
        ? Object.keys(versions).sort()
        : null;
      let marco_versions =
        marco_versions_array &&
        Object.fromEntries(marco_versions_array.map((v) => [v, true]));

      if (versions && !Array.isArray(versions)) {
        versions = { ...versions };
        Object.keys(versions).forEach((v) => {
          if (self.version_groups[v] && self.version_groups[v][0] != v)
            delete versions[v];
        });
        // Nothing left once group duplicates are removed.
        if (!Object.keys(versions).length) return;
      }

      /// ## message `get`
      /// `get` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.
      /// ``` js
      /// { cmd: 'get',
      ///   peer: 'SENDER_ID',
      ///   conn: 'CONN_ID',
      ///   parents: {'PARENT_VERSION_ID': true, ...} }
      /// ```
      /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.
      if (cmd == "get" || (cmd == "welcome" && peer != null)) {
        if (self.conns[conn] != null) throw Error("bad");
        self.conns[conn] = { peer, seq: ++self.conn_count };
      }

      /// ## message `fissure`
      ///
      /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). It is also possible to send multiple `fissures` in an array.
      /// ``` js
      /// { cmd: 'fissure',
      ///   fissure: { // or fissures: [{...}, {...}, ...],
      ///     a: 'PEER_A_ID',
      ///     b:  'PEER_B_ID',
      ///     conn: 'CONN_ID',
      ///     versions: {'VERSION_ID': true, ...},
      ///     time: Date.now()
      ///   },
      ///   conn: 'CONN_ID' }
      /// ```
      /// Note that `time` isn't used for anything critical, as it's just wallclock time.
      if (fissure) fissures = [fissure];

      // Stamp each incoming fissure with our logical clock (the connection
      // counter) so pruning can tell which connections predate it.
      if (fissures) fissures.forEach((f) => (f.t = self.conn_count));

      if (versions && (cmd == "set" || cmd == "welcome"))
        versions = Object.fromEntries(versions.map((v) => [v.version, v]));
      if (version) versions = { [version]: true };

      let rebased_patches = [];

      let fissures_back = [];
      let fissures_forward = [];
      let fissures_done = {};

      // Deep-copy fissures for the wire, stripping the local `t` stamp.
      function copy_fissures(fs) {
        return fs.map((f) => {
          f = JSON.parse(JSON.stringify(f));
          delete f.t;
          return f;
        });
      }

      if (fissures) {
        // Key each fissure by a:b:conn and pair it with its mirror b:a:conn
        // to decide what we keep, echo back, and forward.
        let fiss_map = Object.fromEntries(
          fissures.map((f) => [f.a + ":" + f.b + ":" + f.conn, f])
        );
        for (let [key, f] of Object.entries(fiss_map)) {
          if (fissures_done[f.conn]) continue;
          fissures_done[f.conn] = true;

          let our_f = self.fissures[key];
          let other_key = f.b + ":" + f.a + ":" + f.conn;
          let their_other = fiss_map[other_key];
          let our_other = self.fissures[other_key];

          if (!our_f) self.fissures[key] = f;
          if (their_other && !our_other) self.fissures[other_key] = their_other;

          // We are the other side of this fissure and haven't recorded our
          // half yet: synthesize it (and drop the dead connection).
          if (!their_other && !our_other && f.b == self.id) {
            if (self.conns[f.conn]) delete self.conns[f.conn];
            our_other = self.fissures[other_key] = {
              ...f,
              a: f.b,
              b: f.a,
              t: self.conn_count,
            };
          }

          if (!their_other && our_other) {
            fissures_back.push(f);
            fissures_back.push(our_other);
          }

          if (!our_f || (their_other && !our_other)) {
            fissures_forward.push(f);
            if (their_other || our_other)
              fissures_forward.push(their_other || our_other);
          }
        }
      }

      /// ## message `welcome`
      /// Sent in response to a `get`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `get` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.
      ///
      /// ``` js
      /// {
      ///   cmd: 'welcome',
      ///   versions: [
      ///     //each version looks like a set message...
      ///   ],
      ///   fissures: [
      ///     //each fissure looks as it would in a fissure message...
      ///   ],
      ///   parents: 
      ///     {
      ///       //versions you must have before consuming these new versions
      ///       'PARENT_VERSION_ID': true,
      ///       ...
      ///     },
      ///   [peer: 'SENDER_ID'], // if responding to a get
      ///   conn: 'CONN_ID'
      /// } 
      /// ```
      // _T collects incoming versions we could NOT apply yet (missing
      // ancestors); added_versions collects those we did apply.
      let _T = {};
      let added_versions = [];
      if (cmd == "welcome") {
        var versions_to_add = {};
        let vs = Object.values(versions);
        vs.forEach((v) => (versions_to_add[v.version] = v.parents));
        vs.forEach((v) => {
          if (
            self.T[v.version] ||
            (self.version_groups[v.version] &&
              self.version_groups[v.version][0] != v.version)
          ) {
            // We already have this version, so we also have all of its
            // ancestors: remove them from the to-add set.
            remove_ancestors(v.version);
            function remove_ancestors(v) {
              if (versions_to_add[v]) {
                Object.keys(versions_to_add[v]).forEach(remove_ancestors);
                delete versions_to_add[v];
              }
            }
          }
        });

        for (let v of vs) _T[v.version] = v.parents;

        l1: for (var v of vs) {
          if (versions_to_add[v.version]) {
            let ps = Object.keys(v.parents);

            if (!ps.length && Object.keys(self.T).length) continue;
            for (p of ps) if (!self.T[p]) continue l1; // NOTE(review): `p` is an undeclared (implicit global) loop variable

            rebased_patches = rebased_patches.concat(
              self.add_version(v.version, v.parents, v.patches, v.sort_keys)
            );

            added_versions.push(v);
            delete _T[v.version];
          }
        }
      }

      if (cmd == "get" || (cmd == "welcome" && peer != null)) {
        // Reply with our own welcome, including every fissure we know of.
        let fissures_back = Object.values(self.fissures);

        if (cmd == "welcome") {
          // Versions we couldn't apply leave a gap; record a self-fissure
          // (conn prefixed with "-") protecting the versions we applied plus
          // the gap's leaves.
          var leaves = { ..._T };
          Object.keys(_T).forEach((v) => {
            Object.keys(_T[v]).forEach((p) => delete leaves[p]);
          });

          let f = {
            a: self.id,
            b: peer,
            conn: "-" + conn,
            versions: Object.fromEntries(
              added_versions
                .concat(Object.keys(leaves).map((v) => versions[v]))
                .map((v) => [v.version, true])
            ),
            time: get_time(),
            t: self.conn_count,
          };
          if (Object.keys(f.versions).length) {
            let key = f.a + ":" + f.b + ":" + f.conn;
            self.fissures[key] = f;
            fissures_back.push(f);
            fissures_forward.push(f);
          }
        }

        send({
          cmd: "welcome",
          versions: self.generate_braid(parents || versions),
          fissures: copy_fissures(fissures_back),
          parents:
            parents &&
            Object.keys(parents).length &&
            self.get_leaves(self.ancestors(parents, true)),
          ...(cmd == "get" ? { peer: self.id } : {}),
          conn,
        });
      } else if (fissures_back.length) {
        send({
          cmd: "fissure",
          fissures: copy_fissures(fissures_back),
          conn,
        });
      }

      /// ## message `forget`
      /// Used to disconnect without creating a fissure, presumably meaning the sending peer doesn't plan to make any edits while they're disconnected.
      /// ``` js
      /// {cmd: 'forget', conn: 'CONN_ID'}
      /// ```
      if (cmd == "forget") {
        if (self.conns[conn] == null) throw Error("bad");
        send({ cmd: "ack", forget: true, conn });

        delete self.conns[conn];
        delete self.proto_conns[conn];
      }

      /// ## message forget `ack` 
      /// Sent in response to `forget`.. so they know we forgot them.
      /// ``` js
      /// {cmd: 'ack', forget: true, conn: 'CONN_ID'}
      /// ```
      if (cmd == "ack" && forget) {
        self.forget_cbs[conn]();
      }

      /// ## message `set`
      /// Sent to alert peers about a change in the document. The change is represented as a version, with a unique id, a set of parent versions (the most recent versions known before adding this version), and an array of patches, where the offsets in the patches do not take into account the application of other patches in the same array.
      /// ``` js
      /// { cmd: 'set',
      ///   version: 'VERSION_ID',
      ///   parents: {'PARENT_VERSION_ID': true, ...},
      ///   patches: [ {range: '.json.path.a.b', content: 42}, ... ],
      ///   conn: 'CONN_ID' }
      /// ```
      if (cmd == "set") {
        if (conn == null || !self.T[version]) {
          let ps = Object.keys(parents);

          if (!ps.length && Object.keys(self.T).length) return;
          for (p of ps) if (!self.T[p]) return; // NOTE(review): implicit global `p` again

          rebased_patches = self.add_version(version, parents, patches);

          // Relay the set to every other connection.
          for (let c of Object.keys(self.conns))
            if (c != conn)
              send({ cmd: "set", version, parents, patches, marco, conn: c });
        }
      }

      /// ## message `marco`
      /// Sent for pruning purposes, to try and establish whether everyone has seen the most recent versions. Note that a `set` message is treated as a `marco` message for the version being set.
      /// ``` js
      /// { cmd: 'marco',
      ///   version: 'MARCO_ID',
      ///   versions: {'VERSION_ID_A': true, ...},
      ///   conn: 'CONN_ID' }
      /// ```
      if (cmd == "marco" || cmd == "set") {
        if (!Object.keys(versions).every((v) => self.T[v])) return;

        // If this marco covers exactly our current frontier, our own pending
        // marco timer is redundant.
        if (
          self.marco_timeout &&
          marco_versions_array.length ==
            Object.keys(self.current_version).length &&
          marco_versions_array.every((x) => self.current_version[x])
        ) {
          clear_timeout(self.marco_timeout);
          self.marco_timeout = null;
        }

        let m = self.marcos[marco];
        if (!m) {
          // First time we see this marco: record it and expect one local ack
          // per connection (minus the one it arrived on).
          m = self.marcos[marco] = {
            id: marco,
            origin: conn,
            count: Object.keys(self.conns).length - (conn != null ? 1 : 0),
            versions: marco_versions,
            seq: self.conn_count,
            time: get_time(),
          };
          m.orig_count = m.count;
          m.real_marco = cmd == "marco";
          m.key = JSON.stringify(Object.keys(m.versions).sort());
          self.marco_map[m.key] = self.marco_map[m.key] || {};
          let before = Object.keys(self.marco_map[m.key]).length;
          self.marco_map[m.key][m.id] = true;
          let after = Object.keys(self.marco_map[m.key]).length;
          // Two distinct marcos for the same version set means peers are
          // racing; back off by doubling the wait time (at most once per
          // timer period).
          if (before == 1 && after == 2 && self.marco_increases_allowed > 0) {
            self.marco_current_wait_time *= 2;
            self.marco_increases_allowed--;
          }

          if (cmd == "marco")
            for (let c of Object.keys(self.conns))
              if (c != conn)
                send({
                  cmd: "marco",
                  marco,
                  versions: marco_versions,
                  conn: c,
                });
        } else if (m.seq < self.conns[conn].seq) {
          // The connection is newer than the marco: ack immediately instead
          // of folding it into the count.
          send({
            cmd: "ack",
            seen: "local",
            marco,
            versions: marco_versions,
            conn,
          });
          return;
        } else m.count--;
        check_marco_count(marco);
      }

      /// ## message local `ack`
      /// Sent in response to `set`, but not right away; a peer will first send the `set` over all its other connections, and only after they have all responded with a local `ack` – and we didn't see a `fissure` message while waiting – will the peer send a local `ack` over the originating connection.
      /// ``` js
      /// {cmd: 'ack', seen: 'local', version: 'VERSION_ID', conn: 'CONN_ID'}
      /// ```
      if (cmd == "ack" && seen == "local") {
        let m = self.marcos[marco];
        if (!m || m.cancelled) return;
        m.count--;
        check_marco_count(marco);
      }
      // When all expected local acks have arrived, either propagate the ack
      // toward the marco's origin, or (if we are the origin) declare the
      // versions globally acknowledged.
      function check_marco_count(marco) {
        let m = self.marcos[marco];
        if (m && m.count === 0 && !m.cancelled) {
          m.time2 = get_time();
          if (m.orig_count > 0) {
            // Fold this round-trip into the running time estimate.
            let t = m.time2 - m.time;
            let weight = 0.1;
            self.marco_time_est_1 =
              weight * t + (1 - weight) * self.marco_time_est_1;
          }
          if (m.origin != null) {
            if (self.conns[m.origin])
              send({
                cmd: "ack",
                seen: "local",
                marco,
                versions: marco_versions,
                conn: m.origin,
              });
          } else add_full_ack_leaves(marco);
        }
      }

      /// ## message global `ack`
      /// Sent after an originating peer has received a local `ack` over all its connections, or after any peer receives a global `ack`, so that everyone may come to know that this version has been seen by everyone in this peer group.
      /// ``` js
      /// {cmd: 'ack', seen: 'global', version: 'VERSION_ID', conn: 'CONN_ID'}
      /// ```
      if (cmd == "ack" && seen == "global") {
        let m = self.marcos[marco];

        if (!m || m.cancelled) return;

        let t = get_time() - m.time2;
        let weight = 0.1;
        self.marco_time_est_2 =
          weight * t + (1 - weight) * self.marco_time_est_2;

        // Only one marco existed for this version set: things are calm, so
        // shrink the wait time.
        if (m.real_marco && Object.keys(self.marco_map[m.key]).length == 1) {
          self.marco_current_wait_time *= 0.8;
        }

        add_full_ack_leaves(marco, conn);
      }
      // Record that the marco's versions have been seen by everyone: forward
      // the global ack, move the acked boundary up to them, and prune.
      function add_full_ack_leaves(marco, conn) {
        let m = self.marcos[marco];
        if (!m || m.cancelled) return;
        m.cancelled = true;

        for (let [c, cc] of Object.entries(self.conns))
          if (c != conn && cc.seq <= m.seq)
            send({
              cmd: "ack",
              seen: "global",
              marco,
              versions: marco_versions,
              conn: c,
            });

        for (let v of Object.keys(m.versions)) {
          if (!self.T[v]) continue;
          // Clear old boundary entries among v's ancestors, then make v
          // itself the new boundary.
          let marks = {};
          let f = (v) => {
            if (!marks[v]) {
              marks[v] = true;
              delete self.acked_boundary[v];
              Object.keys(self.T[v]).forEach(f);
            }
          };
          f(v);
          self.acked_boundary[v] = true;
        }
        prune(false, m.seq);
      }

      // Propagate anything new (versions and/or fissures) to all other
      // connections.
      if (added_versions.length || fissures_forward.length) {
        for (let c of Object.keys(self.conns))
          if (c != conn)
            send({
              cmd: added_versions.length ? "welcome" : "fissure",
              ...(added_versions.length ? { versions: added_versions } : {}),
              fissures: copy_fissures(fissures_forward),
              conn: c,
            });
      }

      if (fissures_forward.length) resolve_fissures();

      // If pruning looks possible, schedule a marco after a random delay so
      // that not every peer fires one at once.
      if (
        !self.marco_timeout &&
        cmd != "set" &&
        cmd != "marco" &&
        prune(true)
      ) {
        if (!self.marco_current_wait_time) {
          self.marco_current_wait_time =
            4 * (self.marco_time_est_1 + self.marco_time_est_2);
        }

        let t = Math.random() * self.marco_current_wait_time;

        self.marco_timeout = set_timeout(() => {
          self.marco_increases_allowed = 1;
          self.marco_timeout = null;
          if (prune(true)) self.marco();
        }, t);
      }

      // A propagated welcome that unlocked pruneable versions triggers an
      // immediate marco.
      if (cmd == "welcome" && peer == null && prune(true, null, true))
        self.marco();

      return rebased_patches;
    };

    /// # antimatter_crdt.get(conn) or connect(conn)
    ///
    /// Register a new connection with id `conn` – triggers this antimatter_crdt object to send a `get` message over the given connection. 
    ///
    /// ``` js
    /// alice_antimatter_crdt.get('connection_to_bob')
    /// ```
    // Register connection `conn` and start the handshake by sending a `get`
    // message; the peer on the other end will answer with a `welcome`.
    // `connect` is an alias for `get`.
    self.connect = self.get = (conn) => {
      // Track the connection as provisional until the handshake completes.
      self.proto_conns[conn] = true;
      send({ cmd: "get", peer: self.id, conn });
    };

    /// # antimatter_crdt.forget(conn)
    ///
    /// Disconnect the given connection without creating a fissure – we don't need to reconnect with them.. it seems.. if we do, then we need to call `disconnect` instead, which will create a fissure allowing us to reconnect.
    ///
    /// ``` js
    /// alice_antimatter_crdt.forget('connection_to_bob')
    /// ```
    // Disconnect `conn` without creating a fissure: tell the peer to forget
    // us, and resolve once they acknowledge (the forget-`ack` handler in
    // receive() invokes the callback stored in self.forget_cbs). Local state
    // is torn down via disconnect(conn, false) in either case.
    self.forget = async (conn) => {
      await new Promise((done) => {
        if (self.conns[conn] != null) {
          // Resolved later by the `{cmd: 'ack', forget: true}` handler.
          self.forget_cbs[conn] = done;
          send({ cmd: "forget", conn });
          self.disconnect(conn, false);
        } else {
          // Bug fix: previously `done` was never called for an unknown
          // connection, so `await forget(...)` hung forever.
          self.disconnect(conn, false);
          done();
        }
      });
    };

    /// # antimatter_crdt.disconnect(conn)
    ///
    /// If we detect that a connection has closed, let the antimatter_crdt object know by calling this method with the given connection id – this will create a fissure so we can reconnect with whoever was on the other end of the connection later on. 
    ///
    /// ``` js
    /// alice_antimatter_crdt.disconnect('connection_to_bob')
    /// ```
    // Handle a connection that has closed. Unless `fissure` is false, record
    // a fissure so we can reconcile with the peer after reconnecting later.
    self.disconnect = (conn, fissure = true) => {
      let known = self.conns[conn] != null;
      if (!known && !self.proto_conns[conn]) return;
      delete self.proto_conns[conn];
      if (!known) return;

      let other_peer = self.conns[conn].peer;
      delete self.conns[conn];
      if (!fissure) return;

      let f = create_fissure(other_peer, conn);
      if (f) self.receive({ cmd: "fissure", fissure: f });
    };

    /// # antimatter_crdt.set(...patches)
    ///
    /// Modify this antimatter_crdt object by applying the given patches. Each patch looks like `{range: '.life.meaning', content: 42}`. Calling this method will trigger calling the `send` callback to let our peers know about this change. 
    ///
    /// ``` js
    /// antimatter_crdt.set({
    ///   range: '.life.meaning',
    ///   content: 42
    /// })
    /// ```
    // Apply the given patches locally and broadcast them to our peers as a
    // new version. Returns the freshly minted version id ("SEQ@PEER_ID").
    self.set = (...patches) => {
      const version = self.next_seq++ + "@" + self.id;
      const marco = Math.random().toString(36).slice(2);
      self.receive({
        cmd: "set",
        version,
        parents: { ...self.current_version },
        patches,
        marco,
      });
      return version;
    };

    /// # antimatter_crdt.marco()
    ///
    /// Initiate sending a `marco` message to try and establish whether certain versions can be pruned. 
    ///
    /// ``` js
    /// antimatter_crdt.marco()
    /// ```
    // Broadcast a `marco` probe for the current version frontier (expanded
    // to include every member of each version group), asking peers whether
    // these versions have been seen everywhere so history can be pruned.
    // Returns the marco's random id.
    self.marco = () => {
      const versions = { ...self.current_version };
      for (const v of Object.keys(versions)) {
        const group = self.version_groups[v];
        if (group) for (const member of group) versions[member] = true;
      }
      const id = Math.random().toString(36).slice(2);
      self.receive({ cmd: "marco", marco: id, versions });
      return id;
    };

    // Mark every outstanding marco as cancelled so late acks are ignored.
    function cancel_marcos() {
      Object.values(self.marcos).forEach((m) => (m.cancelled = true));
    }

    // Build a fissure record for a broken connection to `peer`: it protects
    // every version that is not yet globally acknowledged, plus the acked
    // boundary itself. Returns undefined when there is nothing to protect.
    function create_fissure(peer, conn) {
      let acked = self.ancestors(self.acked_boundary);
      let versions = {};
      for (let v of Object.keys(self.T))
        if (!acked[v] || self.acked_boundary[v]) versions[v] = true;
      if (!Object.keys(versions).length) return;
      return { a: self.id, b: peer, conn, versions, time: get_time() };
    }

    // Called when fissures may have been healed (we hold both halves of a
    // fissure pair): empty out the version sets of matched pairs, and for
    // versions no longer protected by any fissure, pull the acked boundary
    // back so they can be re-acknowledged (and eventually pruned).
    function resolve_fissures() {
      let unfissured = {};

      Object.entries(self.fissures).forEach(([fk, f]) => {
        var other_key = f.b + ":" + f.a + ":" + f.conn;
        var other = self.fissures[other_key];
        if (other) {
          if (Object.keys(f.versions).length) {
            for (let v of Object.keys(f.versions)) unfissured[v] = true;
            self.fissures[fk] = { ...f, versions: {} };
          }
          if (Object.keys(other.versions).length) {
            for (let v of Object.keys(other.versions)) unfissured[v] = true;
            self.fissures[other_key] = { ...other, versions: {} };
          }
        }
      });

      if (Object.keys(unfissured).length) {
        // Any in-flight marcos are now stale.
        cancel_marcos();

        // Remove every un-fissured version (and its descendants) from the
        // acked set, then recompute the boundary as that set's leaves.
        let ack_versions = self.ancestors(self.acked_boundary);
        let unfissured_descendants = self.descendants(unfissured, true);
        for (let un of Object.keys(unfissured_descendants))
          if (ack_versions[un]) delete ack_versions[un];
        self.acked_boundary = self.get_leaves(ack_versions);
      }
    }

    // Attempt to shrink the version DAG by collapsing "bubbles" — spans of
    // versions that can safely be renamed/merged — and by discarding
    // resolved or expired fissures. When `just_checking` is true, nothing
    // is mutated: we only report (by returning true) whether a real prune
    // would change anything. `t` bounds which fissures are old enough to
    // drop; `just_versions` restricts the check to version-pruning only.
    function prune(just_checking, t, just_versions) {
      if (just_checking) t = Infinity;

      // In checking mode, work on a copy so self.fissures stays untouched.
      let fissures = just_checking ? { ...self.fissures } : self.fissures;

      // Drop fissure pairs whose mirror image (b:a:conn) also exists and
      // both are old enough: both sides have reconnected.
      // NOTE(review): create_fissure (above) stores the timestamp under
      // `time`, not `t` — confirm `.t` is assigned elsewhere (e.g. when a
      // fissure is received), otherwise `x[1].t <= t` never holds.
      Object.entries(fissures).forEach((x) => {
        var other_key = x[1].b + ":" + x[1].a + ":" + x[1].conn;
        var other = fissures[other_key];
        if (other && x[1].t <= t && other.t <= t) {
          delete fissures[x[0]];
          delete fissures[other_key];
        }
      });

      // Expire fissures that have outlived fissure_lifetime.
      if (self.fissure_lifetime != null) {
        var now = get_time();
        Object.entries(fissures).forEach(([k, f]) => {
          if (f.time == null) f.time = now;
          if (f.time <= now - self.fissure_lifetime) {
            delete fissures[k];
          }
        });
      }

      // Checking mode can answer early: a prune would drop some fissures.
      if (
        just_checking &&
        !just_versions &&
        Object.keys(fissures).length < Object.keys(self.fissures).length
      )
        return true;

      // Versions referenced by surviving fissures must not be collapsed.
      var restricted = {};

      Object.values(fissures).forEach((f) => {
        Object.keys(f.versions).forEach((v) => (restricted[v] = true));
      });

      // For a real prune, also protect everything not yet acknowledged.
      if (!just_checking) {
        var acked = self.ancestors(self.acked_boundary);
        Object.keys(self.T).forEach((x) => {
          if (!acked[x]) restricted[x] = true;
        });
      }

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      // to_bubble maps each version to its replacement bubble
      // [bottom, top]; mark_bubble floods a bubble through the ancestors.
      let to_bubble = {};
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }
      let visited = {};
      // Walk the DAG from the current leaves looking for collapsible spans.
      function f(cur) {
        if (!self.T[cur] || visited[cur]) return;
        visited[cur] = true;

        // First try to collapse a whole multi-member parent set as one bubble.
        if (
          to_bubble[cur] == null &&
          parent_sets[cur] &&
          !parent_sets[cur].done
        ) {
          parent_sets[cur].done = true;
          let bottom = parent_sets[cur].members;
          let top = find_one_bubble(bottom, children, child_sets, restricted);
          if (top) {
            if (just_checking) return true;
            let bottom_array = Object.keys(bottom).sort();
            let top_array = Object.keys(top);
            raw_add_version_group(bottom_array);
            let bubble = [bottom_array[0], top_array[0]];
            for (let v of top_array) to_bubble[v] = bubble;
            for (let v of bottom_array) mark_bubble(v, bubble);
          }
        }
        // Otherwise try a bubble rooted at just this version.
        if (to_bubble[cur] == null) {
          let top = find_one_bubble(
            { [cur]: true },
            children,
            child_sets,
            restricted
          );
          if (top && !top[cur]) {
            if (just_checking) return true;
            let bubble = [cur, Object.keys(top)[0]];
            for (let v of Object.keys(top)) to_bubble[v] = bubble;
            mark_bubble(bubble[0], bubble);
          } else {
            // Degenerate bubble: the version keeps its own name.
            to_bubble[cur] = [cur, cur];
          }
        }
        // Recurse into parents; `some` short-circuits once checking mode
        // finds any prunable work.
        return Object.keys(
          self.T[cur] || self.T[self.version_groups[cur][0]]
        ).some(f);
      }
      if (Object.keys(self.current_version).some(f) && just_checking)
        return true;

      // Apply the collapse to the CRDT state and the time DAG.
      self.apply_bubbles(to_bubble);

      // Discard marcos whose versions no longer all exist after collapsing.
      for (let [k, m] of Object.entries(self.marcos)) {
        let vs = Object.keys(m.versions);
        if (
          !vs.length ||
          !vs.every((v) => self.T[v] || self.version_groups[v])
        ) {
          delete self.marcos[k];
          delete self.marco_map[m.key][m.id];
          if (!Object.keys(self.marco_map[m.key]).length)
            delete self.marco_map[m.key];
        }
      }

      // Drop version-group entries whose representative was pruned away.
      for (let [v, vs] of Object.entries(self.version_groups)) {
        if (!self.T[vs[0]]) delete self.version_groups[v];
      }
    }

    return self;
  };

  /// ## create_json_crdt([init])
  ///
  /// Create a new `json_crdt` object (or start with `init`, and add stuff to that).
  ///
  /// ``` js
  /// var json_crdt = create_json_crdt()
  /// ```
  create_json_crdt = (self) => {
    self = self || {};
    // S: the CRDT state tree (nested lit/val/obj/arr/str nodes).
    self.S = self.S || null;
    // T: the time DAG — maps each version to its set of parent versions.
    self.T = self.T || {};
    // NOTE(review): root_version is reset unconditionally, even when `self`
    // came in with one — confirm this is intended.
    self.root_version = null;
    // current_version: the set of current leaf versions.
    self.current_version = self.current_version || {};
    // version_cache: version -> cached `set` message (null = regenerate).
    self.version_cache = self.version_cache || {};

    // Helpers for "literal" subtrees, which hold plain JSON as {t:'lit', S}.
    let is_lit = (x) => !x || typeof x != "object" || x.t == "lit";
    let get_lit = (x) => (x && typeof x == "object" && x.t == "lit" ? x.S : x);
    let make_lit = (x) => (x && typeof x == "object" ? { t: "lit", S: x } : x);
    // (Redundant: `self` was already defaulted above.)
    self = self || {};

    /// # json_crdt.read()
    ///
    /// Returns an instance of the `json` object represented by this json_crdt
    /// data-structure, considering only versions accepted by `is_anc`
    /// (defaults to accepting every version).
    ///
    /// ``` js
    /// console.log(json_crdt.read())
    /// ```
    self.read = (is_anc) => raw_read(self.S, is_anc || (() => true));

    // Materialize a plain JSON value from a CRDT subtree `x`, considering
    // only versions for which `is_anc` returns true.
    function raw_read(x, is_anc) {
      // Primitives (and null/undefined) represent themselves.
      if (!x || typeof x != "object") return x;

      switch (x.t) {
        case "lit":
          // Literal subtree: deep-copy so callers can't mutate our state.
          return JSON.parse(JSON.stringify(x.S));
        case "val":
          // Register: read whichever element is visible at position 0.
          return raw_read(sequence_crdt.get(x.S, 0, is_anc), is_anc);
        case "obj": {
          var o = {};
          for (var [k, v] of Object.entries(x.S)) {
            var child = raw_read(v, is_anc);
            // Keys whose value reads as null/undefined are omitted.
            if (child != null) o[k] = child;
          }
          return o;
        }
        case "arr": {
          var a = [];
          sequence_crdt.traverse(
            x.S,
            is_anc,
            (node, _, __, ___, ____, deleted) => {
              if (deleted) return;
              for (var e of node.elems) a.push(raw_read(e, is_anc));
            },
            true
          );
          return a;
        }
        case "str": {
          var pieces = [];
          sequence_crdt.traverse(
            x.S,
            is_anc,
            (node, _, __, ___, ____, deleted) => {
              if (!deleted) pieces.push(node.elems);
            },
            true
          );
          return pieces.join("");
        }
        default:
          throw Error("bad");
      }
    }

    /// # json_crdt.generate_braid(versions)
    ///
    /// Returns an array of `set` messages that each look like this: `{version, parents, patches, sort_keys}`, such that if we pass all these messages to `antimatter_crdt.receive()`, we'll reconstruct the data in this `json_crdt` data-structure, assuming the recipient already has the given `versions` (each version is represented as an object with a version, and each value is `true`).
    ///
    /// ``` js
    /// json_crdt.generate_braid({
    ///   alice2: true,
    ///   bob3: true
    /// })
    /// ```
    self.generate_braid = (versions) => {
      // Everything the recipient already knows (the ancestors of `versions`)
      // can be skipped; unknown versions in the input are tolerated.
      var anc =
        versions && Object.keys(versions).length
          ? self.ancestors(versions, true)
          : {};
      var is_anc = (x) => anc[x];

      if (Object.keys(self.T).length === 0) return [];

      // Serve cached set-messages where available; rebuild (and re-cache)
      // the rest from the CRDT state.
      return Object.entries(self.version_cache)
        .filter((x) => !is_anc(x[0]))
        .map(([version, set_message]) => {
          return (self.version_cache[version] =
            set_message || generate_set_message(version));
        });

      // Reconstruct the `set` message for one version from the state tree.
      function generate_set_message(version) {
        // A parentless (root) version is just a full snapshot of its state.
        if (!Object.keys(self.T[version]).length) {
          return {
            version,
            parents: {},
            patches: [{ range: "", content: self.read((v) => v == version) }],
          };
        }

        var is_lit = (x) => !x || typeof x !== "object" || x.t === "lit";
        var get_lit = (x) =>
          x && typeof x === "object" && x.t === "lit" ? x.S : x;

        // Strict ancestors of `version` define the "before" view that the
        // patches are expressed against.
        var ancs = self.ancestors({ [version]: true });
        delete ancs[version];
        var is_anc = (x) => ancs[x];
        var path = [];
        var patches = [];
        var sort_keys = {};
        recurse(self.S);
        // Walk the state tree, collecting the splices this version made at
        // each path into range/content patches.
        function recurse(x) {
          if (is_lit(x)) {
          } else if (x.t === "val") {
            sequence_crdt
              .generate_braid(x.S, version, is_anc, raw_read)
              .forEach((s) => {
                if (s[2].length) {
                  patches.push({ range: path.join(""), content: s[2][0] });
                  if (s[3]) sort_keys[patches.length - 1] = s[3];
                }
              });
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach(recurse);
            });
          } else if (x.t === "arr") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
            // Recurse into array elements, extending the path with indices.
            var i = 0;
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach((e) => {
                path.push(`[${i++}]`);
                recurse(e);
                path.pop();
              });
            });
          } else if (x.t === "obj") {
            Object.entries(x.S).forEach((e) => {
              path.push("[" + JSON.stringify(e[0]) + "]");
              recurse(e[1]);
              path.pop();
            });
          } else if (x.t === "str") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
          }
        }

        return {
          version,
          parents: { ...self.T[version] },
          patches,
          sort_keys,
        };
      }
    };

    /// # json_crdt.apply_bubbles(to_bubble)
    ///
    /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
    ///
    /// ``` js
    /// json_crdt.apply_bubbles({
    ///   alice4: ['bob5', 'alice4'],
    ///   bob5: ['bob5', 'alice4']
    /// })
    /// ```
    self.apply_bubbles = (to_bubble) => {
      // Pass 1: rewrite the state tree — rename versions inside every
      // sequence, and collapse fully-merged subtrees back to literals.
      function recurse(x) {
        if (is_lit(x)) return x;
        if (x.t == "val") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              // A register only needs the first element of each node.
              node.elems = node.elems.slice(0, 1).map(recurse);
            },
            true
          );
          // A single-node, single-element register of a literal collapses
          // to that literal.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.length == 1 &&
            is_lit(x.S.elems[0])
          )
            return x.S.elems[0];
          return x;
        }
        if (x.t == "arr") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.map(recurse);
            },
            true
          );
          // A single-node array of literals with no deletions collapses.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.every(is_lit) &&
            !Object.keys(x.S.deleted_by).length
          )
            return { t: "lit", S: x.S.elems.map(get_lit) };
          return x;
        }
        if (x.t == "obj") {
          Object.entries(x.S).forEach((e) => {
            var y = (x.S[e[0]] = recurse(e[1]));
            if (y == null) delete x.S[e[0]];
          });
          // An object whose values are all literals collapses to a literal.
          if (Object.values(x.S).every(is_lit)) {
            var o = {};
            Object.entries(x.S).forEach((e) => (o[e[0]] = get_lit(e[1])));
            return { t: "lit", S: o };
          }
          return x;
        }
        if (x.t == "str") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          // A single-node string with no deletions collapses to its text.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            !Object.keys(x.S.deleted_by).length
          )
            return x.S.elems;
          return x;
        }
      }
      self.S = recurse(self.S);

      // Pass 2: rename/remove versions in the time DAG and related indexes.
      // NOTE(review): my_where_are_they_now, version_groups, acked_boundary
      // and fissures are antimatter-level fields not initialized by
      // create_json_crdt itself — this method assumes the antimatter wrapper.
      Object.entries(to_bubble).forEach(([version, bubble]) => {
        if (!self.T[version]) return;

        // Remember where the old name went, for peers holding stale versions.
        self.my_where_are_they_now[version] = bubble[0];

        // The bubble's top contributes its parents to the new (bottom) name.
        if (version === bubble[1]) self.T[bubble[0]] = self.T[bubble[1]];

        if (version !== bubble[0]) {
          if (self.root_version == version) self.root_version = bubble[0];
          delete self.T[version];
          delete self.version_cache[version];
          delete self.acked_boundary[version];
          delete self.current_version[version];
          if (
            self.version_groups[version] &&
            self.version_groups[version][0] == version
          ) {
            for (let v of self.version_groups[version]) {
              delete self.version_groups[v];
            }
          }
          // Strip the removed version out of every remaining parent set.
          for (let [k, parents] of Object.entries(self.T)) {
            self.T[k] = parents = { ...parents };
            for (let p of Object.keys(parents)) {
              if (p == version) delete parents[p];
            }
          }
        } else self.version_cache[version] = null;
      });

      // If history has collapsed to a single fully-acked leaf with no open
      // fissures, the whole document becomes one literal snapshot.
      var leaves = Object.keys(self.current_version);
      var acked_boundary = Object.keys(self.acked_boundary);
      var fiss = Object.keys(self.fissures);
      if (
        leaves.length == 1 &&
        acked_boundary.length == 1 &&
        leaves[0] == acked_boundary[0] &&
        fiss.length == 0
      ) {
        self.T = { [leaves[0]]: {} };
        self.S = make_lit(self.read());
      }
    };

    /// # json_crdt.add_version(version, parents, patches[, sort_keys])
    ///
    /// The main method for modifying a `json_crdt` data structure.
    ///
    /// * `version`: Unique string associated with this edit.
    /// * `parents`: A set of versions that this version is aware of, represented as a map with versions as keys, and values of `true`.
    /// * `patches`: An array of patches, each patch looks like this `{range: '.life.meaning', content: 42}`.
    /// * `sort_keys`: (optional) An object where each key is an index, and the value is a sort_key to use with the patch at the given index in the `patches` array – a sort_key overrides the version for a patch for the purposes of sorting. This can be useful after doing some pruning.
    ///
    /// Returns the patches rebased onto the current view (or the original
    /// patches for a root snapshot; undefined for a duplicate version).
    ///
    /// ``` js
    /// json_crdt.add_version(
    ///   'alice6',
    ///   {
    ///     alice5: true,
    ///     bob7: true
    ///   },
    ///   [
    ///     {
    ///       range: '.a.b',
    ///       content: 'c'
    ///     }
    ///   ]
    /// )
    /// ```
    self.add_version = (version, parents, patches, sort_keys) => {
      // Ignore versions we've already incorporated.
      if (self.T[version]) return;

      if (self.root_version == null) self.root_version = version;

      self.T[version] = { ...parents };

      // Cache a deep copy of the raw message so generate_braid can replay it.
      self.version_cache[version] = JSON.parse(
        JSON.stringify({
          version,
          parents,
          patches,
          sort_keys,
        })
      );

      // This version supersedes its parents as a current leaf.
      Object.keys(parents).forEach((k) => {
        if (self.current_version[k]) delete self.current_version[k];
      });
      self.current_version[version] = true;

      if (!sort_keys) sort_keys = {};

      // A parentless version is a snapshot: replace the entire state.
      if (!Object.keys(parents).length) {
        var parse = self.parse_patch(patches[0]);
        self.S = make_lit(parse.value);
        return patches;
      }

      // Choose the ancestor test. Note the reference-equality check: when
      // the caller passes the live current_version object itself, every
      // version except the new one counts as an ancestor.
      let is_anc;
      if (parents == self.current_version) {
        is_anc = (_version) => _version != version;
      } else {
        let ancs = self.ancestors(parents);
        is_anc = (_version) => ancs[_version];
      }

      // Apply each patch to the state tree, collecting its rebased form
      // (positions adjusted to the current view) to return to the caller.
      var rebased_patches = [];
      patches.forEach((patch, i) => {
        var sort_key = sort_keys[i];
        var parse = self.parse_patch(patch);
        var cur = resolve_path(parse);
        if (!parse.slice) {
          // Whole-value assignment (or deletion) on a register node.
          if (cur.t != "val") throw Error("bad");
          var len = sequence_crdt.length(cur.S, is_anc);
          sequence_crdt.add_version(
            cur.S,
            version,
            [[0, len, [parse.delete ? null : make_lit(parse.value)], sort_key]],
            is_anc
          );
          rebased_patches.push(patch);
        } else {
          if (typeof parse.value === "string" && cur.t !== "str")
            throw Error(
              `Cannot splice string ${JSON.stringify(
                parse.value
              )} into non-string`
            );
          if (parse.value instanceof Array && cur.t !== "arr")
            throw Error(
              `Cannot splice array ${JSON.stringify(
                parse.value
              )} into non-array`
            );
          if (parse.value instanceof Array)
            parse.value = parse.value.map((x) => make_lit(x));

          // Negative indices (including -0) count back from the end.
          var r0 = parse.slice[0];
          var r1 = parse.slice[1];
          if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {
            let len = sequence_crdt.length(cur.S, is_anc);
            if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0;
            if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1;
          }

          var rebased_splices = sequence_crdt.add_version(
            cur.S,
            version,
            [[r0, r1 - r0, parse.value, sort_key]],
            is_anc
          );
          // Convert each rebased splice back into a range/content patch.
          for (let rebased_splice of rebased_splices)
            rebased_patches.push({
              range: `${parse.path
                .map((x) => `[${JSON.stringify(x)}]`)
                .join("")}[${rebased_splice[0]}:${
                rebased_splice[0] + rebased_splice[1]
              }]`,
              content: rebased_splice[2],
            });
        }
      });

      // Walk parse.path through the state tree, upgrading literal subtrees
      // to editable CRDT nodes along the way, and return the node the
      // patch applies to.
      function resolve_path(parse) {
        var cur = self.S;
        if (!cur || typeof cur != "object" || cur.t == "lit")
          cur = self.S = {
            t: "val",
            S: sequence_crdt.create_node(self.root_version, [cur]),
          };
        var prev_S = null;
        var prev_i = 0;
        for (var i = 0; i < parse.path.length; i++) {
          var key = parse.path[i];
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          if (cur.t == "lit") {
            // Upgrade a literal to a real obj/arr node so it can take edits.
            var new_cur = {};
            if (cur.S instanceof Array) {
              new_cur.t = "arr";
              new_cur.S = sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              );
            } else {
              if (typeof cur.S != "object") throw Error("bad");
              new_cur.t = "obj";
              new_cur.S = {};
              Object.entries(cur.S).forEach(
                (e) => (new_cur.S[e[0]] = make_lit(e[1]))
              );
            }
            cur = new_cur;
            sequence_crdt.set(prev_S, prev_i, cur, is_anc);
          }
          if (cur.t == "obj") {
            let x = cur.S[key];
            if (!x || typeof x != "object" || x.t == "lit")
              x = cur.S[key] = {
                t: "val",
                S: sequence_crdt.create_node(self.root_version, [
                  x == null ? null : x,
                ]),
              };
            cur = x;
          } else if (i == parse.path.length - 1 && !parse.slice) {
            // Final numeric index on a sequence: treat as a 1-element slice.
            parse.slice = [key, key + 1];
            parse.value = cur.t == "str" ? parse.value : [parse.value];
          } else if (cur.t == "arr") {
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = key), is_anc);
          } else throw Error("bad");
        }
        if (parse.slice) {
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          if (typeof cur == "string") {
            // Promote a plain string to a string CRDT node.
            cur = {
              t: "str",
              S: sequence_crdt.create_node(self.root_version, cur),
            };
            sequence_crdt.set(prev_S, prev_i, cur, is_anc);
          } else if (cur.t == "lit") {
            // Promote a literal array to an array CRDT node.
            if (!(cur.S instanceof Array)) throw Error("bad");
            cur = {
              t: "arr",
              S: sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              ),
            };
            sequence_crdt.set(prev_S, prev_i, cur, is_anc);
          }
        }
        return cur;
      }

      return rebased_patches;
    };

    /// # json_crdt.get_child_map()
    ///
    /// Returns a map where each key is a version, and each value is a set of
    /// child versions, represented as a map with versions as keys and
    /// values of `true`. (The inverse of the parent relation in `self.T`.)
    ///
    /// ``` js
    /// json_crdt.get_child_map()
    /// ```
    self.get_child_map = () => {
      const children = {};
      for (const [version, parents] of Object.entries(self.T)) {
        for (const parent of Object.keys(parents)) {
          if (!children[parent]) children[parent] = {};
          children[parent][version] = true;
        }
      }
      return children;
    };

    /// # json_crdt.ancestors(versions, ignore_nonexistent=false)
    ///
    /// Gather `versions` and all their ancestors into a set. `versions` is a
    /// set of versions, i.e. a map with version-keys and values of true – we
    /// basically return a larger set. If `ignore_nonexistent` is `true`, we
    /// won't throw on versions missing from our data-structure.
    ///
    /// ``` js
    /// json_crdt.ancestors({
    ///   alice12: true,
    ///   bob10: true
    /// })
    /// ```
    self.ancestors = (versions, ignore_nonexistent) => {
      var result = {};
      function visit(version) {
        if (result[version]) return;
        var parents = self.T[version];
        if (!parents) {
          if (ignore_nonexistent) return;
          throw Error(`The version ${version} no existo`);
        }
        result[version] = true;
        for (var parent of Object.keys(parents)) visit(parent);
      }
      for (var v of Object.keys(versions)) visit(v);
      return result;
    };

    /// # json_crdt.descendants(versions, ignore_nonexistent=false)
    ///
    /// Gather `versions` and all their descendants into a set. `versions` is
    /// a set of versions, i.e. a map with version-keys and values of true –
    /// we basically return a larger set. If `ignore_nonexistent` is `true`,
    /// we won't throw on versions missing from our data-structure.
    ///
    /// ``` js
    /// json_crdt.descendants({
    ///   alice12: true,
    ///   bob10: true
    /// })
    /// ```
    self.descendants = (versions, ignore_nonexistent) => {
      var children = self.get_child_map();
      var result = {};
      function visit(version) {
        if (result[version]) return;
        if (!self.T[version]) {
          if (ignore_nonexistent) return;
          throw Error(`The version ${version} no existo`);
        }
        result[version] = true;
        for (var child of Object.keys(children[version] || {})) visit(child);
      }
      for (var v of Object.keys(versions)) visit(v);
      return result;
    };

    /// # json_crdt.get_leaves(versions)
    ///
    /// Returns the subset of `versions` that are not a parent of another
    /// member of `versions`. Both input and output are sets represented as
    /// objects with version keys and `true` values.
    self.get_leaves = (versions) => {
      var leaves = { ...versions };
      for (var v of Object.keys(versions))
        for (var parent of Object.keys(self.T[v])) delete leaves[parent];
      return leaves;
    };

    /// # json_crdt.parse_patch(patch)
    ///
    /// Takes a patch in the form `{range, content}`, and returns an object
    /// of the form `{path: [...], [slice: [...]], [delete: true], value}`;
    /// basically calling `parse_json_path` on `patch.range`, and attaching
    /// `patch.content` as `value`.
    self.parse_patch = (patch) =>
      Object.assign(self.parse_json_path(patch.range), {
        value: patch.content,
      });

    /// # json_crdt.parse_json_path(json_path)
    ///
    /// Parses the string `json_path` into an object like: `{path: [...], [slice: [...]], [delete: true]}`.
    ///
    /// * `a.b[3]` --> `{path: ['a', 'b', 3]}`
    /// * `a.b[3:5]` --> `{path: ['a', 'b'], slice: [3, 5]}`
    /// * `delete a.b` --> `{path: ['a', 'b'], delete: true}`
    ///
    /// ``` js
    /// console.log(json_crdt.parse_json_path('a.b.c'))
    /// ```
    self.parse_json_path = (json_path) => {
      var result = { path: [] };
      // Alternatives: "delete " prefix | dotted identifier | bracketed
      // index, slice, or quoted key. A fresh /g regex per call, so there is
      // no stateful lastIndex leaking between calls.
      var re =
        /^(delete)\s+|\.?([^\.\[ =]+)|\[((\-?\d+)(:\-?\d+)?|"(\\"|[^"])*")\]/g;
      for (var m = re.exec(json_path); m; m = re.exec(json_path)) {
        if (m[1]) {
          result.delete = true;
        } else if (m[2]) {
          result.path.push(m[2]);
        } else if (m[3] && m[5]) {
          // `[a:b]` — a slice; m[5] includes the leading colon.
          result.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))];
        } else if (m[3]) {
          // `[3]` or `["key"]` — a single path step.
          result.path.push(JSON.parse(m[3]));
        }
      }
      return result;
    };

    return self;
  };

  /// # sequence_crdt.create_node(version, elems, [end_cap, sort_key])
  ///
  /// Creates a node for a `sequence_crdt` sequence CRDT with the given properties. The resulting node will look like this:
  ///
  /// ``` js
  /// {
  ///   version, // globally unique string
  ///   elems, // a string or array representing actual data elements of the underlying sequence
  ///   end_cap, // this is useful for dealing with replace operations
  ///   sort_key, // version to pretend this is for the purposes of sorting
  ///   deleted_by : {}, // if this node gets deleted, we'll mark it here
  ///   nexts : [], // array of nodes following this one
  ///   next : null // final node following this one (after all the nexts)
  /// }
  /// ```
  ///
  /// ``` js
  /// var sequence_node = sequence_crdt.create_node('alice1', 'hello')
  /// ```
  sequence_crdt.create_node = (version, elems, end_cap, sort_key) => {
    // Fresh deleted_by/nexts per node — never share these across nodes.
    return {
      version,
      sort_key,
      elems,
      end_cap,
      deleted_by: {},
      nexts: [],
      next: null,
    };
  };

  /// # sequence_crdt.generate_braid(root_node, version, is_anc)
  ///
  /// Reconstructs an array of splice-information which can be passed to `sequence_crdt.add_version` in order to add `version` to another `sequence_crdt` instance – the returned array looks like: `[[insert_pos, delete_count, insert_elems, sort_key], ...]`. `is_anc` is a function which accepts a version string and returns `true` if and only if the given version is an ancestor of `version` (i.e. a version which the author of `version` knew about when they created that version).
  ///
  /// ``` js
  /// var root_node = sequence_crdt.create_node('alice1', 'hello')
  /// console.log(sequence_crdt.generate_braid(root_node, 'alice1', x => false)) // outputs [0, 0, "hello"]
  /// ```
  sequence_crdt.generate_braid = (S, version, is_anc, read_array_elements) => {
    if (!read_array_elements) read_array_elements = (x) => x;
    var splices = [];

    // Record an insertion at `offset`, merging it into the previous splice
    // when they are adjacent and compatible. Each splice carries a 5th
    // element tagging its kind: "i" insert, "r" replace, "d" delete.
    function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
      if (typeof ins !== "string")
        ins = ins.map((x) => read_array_elements(x, () => false));
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (
          prev[0] + prev[1] === offset &&
          !end_cap &&
          (!is_row_header || prev[3] == sort_key) &&
          (prev[4] === "i" || (prev[4] === "r" && prev[1] === 0))
        ) {
          prev[2] = prev[2].concat(ins);
          return;
        }
      }
      splices.push([offset, 0, ins, sort_key, end_cap ? "r" : "i"]);
    }

    // Record a deletion, merging with an adjacent non-insert splice.
    function add_del(offset, del, ins) {
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (prev[0] + prev[1] === offset && prev[4] !== "i") {
          prev[1] += del;
          return;
        }
      }
      splices.push([offset, del, ins, null, "d"]);
    }

    // Walk the tree in document order, tracking the offset as seen by an
    // observer who knows only the ancestor versions.
    var offset = 0;
    function helper(node, _version, end_cap, is_row_header) {
      if (_version === version) {
        // This node was inserted by `version` itself.
        add_ins(
          offset,
          node.elems.slice(0),
          node.sort_key,
          end_cap,
          is_row_header
        );
      } else if (node.deleted_by[version] && node.elems.length > 0) {
        // This node's content was deleted by `version`.
        add_del(offset, node.elems.length, node.elems.slice(0, 0));
      }

      // Only content visible to the ancestors advances the offset.
      if (
        (!_version || is_anc(_version)) &&
        !Object.keys(node.deleted_by).some(is_anc)
      ) {
        offset += node.elems.length;
      }

      node.nexts.forEach((next) =>
        helper(next, next.version, node.end_cap, true)
      );
      if (node.next) helper(node.next, _version);
    }
    helper(S, null);
    splices.forEach((s) => {
      // if we have replaces with 0 deletes,
      // make them have at least 1 delete..
      // this can happen when there are multiple replaces of the same text,
      // and our code above will associate those deletes with only one of them
      if (s[4] === "r" && s[1] === 0) s[1] = 1;
    });
    return splices;
  };

  /// # sequence_crdt.apply_bubbles(root_node, to_bubble)
  ///
  /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
  ///
  /// ``` js
  /// sequence_crdt.apply_bubbles(root_node, {
  ///   alice4: ['bob5', 'alice4'],
  ///   bob5: ['bob5', 'alice4']
  /// })
  /// ```
  sequence_crdt.apply_bubbles = (S, to_bubble) => {
    // Pass 1: rename node versions and deleted_by markers per to_bubble.
    sequence_crdt.traverse(
      S,
      () => true,
      (node) => {
        if (
          to_bubble[node.version] &&
          to_bubble[node.version][0] != node.version
        ) {
          // Keep ordering stable: the old name lives on as the sort key.
          if (!node.sort_key) node.sort_key = node.version;
          node.version = to_bubble[node.version][0];
        }

        for (var x of Object.keys(node.deleted_by)) {
          if (to_bubble[x]) {
            delete node.deleted_by[x];
            node.deleted_by[to_bubble[x][0]] = true;
          }
        }
      },
      true
    );

    // Append `next` after the end of `node`'s linear next-chain.
    function set_nnnext(node, next) {
      while (node.next) node = node.next;
      node.next = next;
    }

    // Pass 2: flatten each "line" of the tree, merging and dropping nodes
    // that no longer need to be distinguished after the renames.
    do_line(S, S.version);
    function do_line(node, version) {
      var prev = null;
      while (node) {
        // If the first branch now shares our version, the fork can be
        // linearized: chain all branches into a single next-list.
        if (node.nexts[0] && node.nexts[0].version == version) {
          for (let i = 0; i < node.nexts.length; i++) {
            delete node.nexts[i].version;
            delete node.nexts[i].sort_key;
            set_nnnext(
              node.nexts[i],
              i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next
            );
          }
          node.next = node.nexts[0];
          node.nexts = [];
        }

        // Content deleted by our own line-version can be dropped outright.
        if (node.deleted_by[version]) {
          node.elems = node.elems.slice(0, 0);
          node.deleted_by = {};
          if (prev) {
            // Step back so the emptied node can merge with its predecessor.
            node = prev;
            continue;
          }
        }

        var next = node.next;

        // Merge with the follower when one side is empty or both carry the
        // exact same deleted_by set.
        if (
          !node.nexts.length &&
          next &&
          (!node.elems.length ||
            !next.elems.length ||
            (Object.keys(node.deleted_by).every((x) => next.deleted_by[x]) &&
              Object.keys(next.deleted_by).every((x) => node.deleted_by[x])))
        ) {
          if (!node.elems.length) node.deleted_by = next.deleted_by;
          node.elems = node.elems.concat(next.elems);
          node.end_cap = next.end_cap;
          node.nexts = next.nexts;
          node.next = next.next;
          continue;
        }

        // Drop an empty, branchless follower.
        if (next && !next.elems.length && !next.nexts.length) {
          node.next = next.next;
          continue;
        }

        // Recurse into remaining branches.
        for (let n of node.nexts) do_line(n, n.version);

        prev = node;
        node = next;
      }
    }
  };

  /// # sequence_crdt.get(root_node, i, is_anc)
  /// 
  /// Returns the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// var x = sequence_crdt.get(root_node, 2, {
  ///     alice1: true
  /// })
  /// ```
  sequence_crdt.get = (S, i, is_anc) => {
    var found = null;
    var passed = 0;
    // Walk the visible nodes until the one containing index i is reached.
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      var local = i - passed;
      if (local < node.elems.length) {
        found = node.elems[local];
        return false; // stop the traversal
      }
      passed += node.elems.length;
    });
    return found;
  };

  /// # sequence_crdt.set(root_node, i, v, is_anc)
  /// 
  /// Sets the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node` to the value `v`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// sequence_crdt.set(root_node, 2, 'x', {
  ///   alice1: true
  /// })
  /// ```
  sequence_crdt.set = (S, i, v, is_anc) => {
    var passed = 0;
    // Walk visible nodes until the one containing index i is reached.
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      var local = i - passed;
      if (local < node.elems.length) {
        // Strings are immutable, so rebuild; arrays can be written in place.
        if (typeof node.elems == "string")
          node.elems =
            node.elems.slice(0, local) + v + node.elems.slice(local + 1);
        else node.elems[local] = v;
        return false; // stop the traversal
      }
      passed += node.elems.length;
    });
  };

  /// # sequence_crdt.length(root_node, is_anc)
  /// 
  /// Returns the length of the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// console.log(sequence_crdt.length(root_node, {
  ///  alice1: true
  /// }))
  /// ```
  sequence_crdt.length = (S, is_anc) => {
    var total = 0;
    // Sum the element counts of every node visible under is_anc.
    sequence_crdt.traverse(S, is_anc || (() => true), (node) => {
      total += node.elems.length;
    });
    return total;
  };

  /// # sequence_crdt.break_node(node, break_position, end_cap, new_next)
  /// 
  /// This method breaks apart a `sequence_crdt` node into two nodes, each representing a subsequence of the sequence represented by the original node. The `node` parameter is modified into the first node, and the second node is returned. The first node represents the elements of the sequence before `break_position`, and the second node represents the rest of the elements. If `end_cap` is truthy, then the first node will have `end_cap` set – this is generally done if the elements in the second node are being replaced. This method will add `new_next` to the first node's `nexts` array.
  /// 
  /// ``` js
  /// var node = sequence_crdt.create_node('alice1', 'hello') // node.elems == 'hello'
  /// var second = sequence_crdt.break_node(node, 2) // now node.elems == 'he', and second.elems == 'llo'
  /// ```
  sequence_crdt.break_node = (node, x, end_cap, new_next) => {
    // Build the second half first, before mutating `node`: it inherits the
    // elements from position x onward, along with node's end_cap, deletion
    // marks, and outgoing pointers.
    var second = sequence_crdt.create_node(
      null,
      node.elems.slice(x),
      node.end_cap
    );
    Object.assign(second.deleted_by, node.deleted_by);
    second.nexts = node.nexts;
    second.next = node.next;

    // Shrink `node` into the first half and relink it to the second.
    node.elems = node.elems.slice(0, x);
    node.end_cap = end_cap;
    node.nexts = new_next ? [new_next] : [];
    node.next = second;

    return second;
  };

  /// # sequence_crdt.add_version(root_node, version, splices, [is_anc])
  /// 
  /// This is the main method in sequence_crdt, used to modify the sequence. The modification must be given a unique `version` string, and the modification itself is represented as an array of `splices`, where each splice looks like this: `[position, num_elements_to_delete, elements_to_insert, optional_sort_key]`. 
  /// 
  /// Note that all positions are relative to the original sequence, before any splices have been applied. Positions are counted by only considering nodes with versions which result in `true` when passed to `is_anc`. (and are not `deleted_by` any versions which return `true` when passed to `is_anc`).
  /// 
  /// Returns an array of the same splices rebased onto the full (non-pruned-away) sequence coordinates.
  /// 
  /// ``` js
  /// var node = sequence_crdt.create_node('alice1', 'hello') 
  /// sequence_crdt.add_version(node, 'alice2', [[5, 0, ' world']], v => v == 'alice1') 
  /// ```
  sequence_crdt.add_version = (S, version, splices, is_anc) => {
    // Splices re-expressed against the sequence of all non-deleted nodes,
    // collected during the traversal below and returned to the caller.
    var rebased_splices = [];

    // Insert `to` into the sibling array `nexts`, keeping it sorted by
    // sort_key (falling back to version) so concurrent insertions at the
    // same position get a deterministic order.
    function add_to_nexts(nexts, to) {
      var i = binarySearch(nexts, function (x) {
        if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1;
        if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1;
        return 0;
      });
      nexts.splice(i, 0, to);
    }

    var si = 0;           // index of the splice currently being applied
    var delete_up_to = 0; // absolute position the active deletion extends to

    // Invoked by the local `traverse` below for every node (including ones
    // deleted under is_anc); applies the current splice once the traversal
    // offset reaches it. When `deleted` is true, `offset` refers to the
    // node's start position.
    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {
      var s = splices[si];
      if (!s) return;
      var sort_key = s[3];

      if (deleted) {
        // Pure insertion at the boundary of a deleted node: attach the new
        // node here (splitting at position 0 if needed).
        if (s[1] == 0 && s[0] == offset) {
          if (node.elems.length == 0 && !node.end_cap && has_nexts) return;
          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          if (node.elems.length == 0 && !node.end_cap)
            add_to_nexts(node.nexts, new_node);
          else sequence_crdt.break_node(node, 0, undefined, new_node);
          si++;
        }

        // Replacement starting exactly at this deleted, end-capped node:
        // open the deletion range and hang the replacement text here.
        if (
          delete_up_to <= offset &&
          s[1] &&
          s[2] &&
          s[0] == offset &&
          node.end_cap &&
          !has_nexts &&
          (node.next && node.next.elems.length) &&
          !Object.keys(node.next.deleted_by).some((version) => f(version))
        ) {
          delete_up_to = s[0] + s[1];

          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          add_to_nexts(node.nexts, new_node);
        }

        return;
      }

      // Pure insertion into a visible node.
      if (s[1] == 0) {
        var d = s[0] - (offset + node.elems.length);
        if (d > 0) return; // insertion point lies beyond this node
        if (d == 0 && !node.end_cap && has_nexts) return; // handled at a branch
        var new_node = sequence_crdt.create_node(version, s[2], null, sort_key);

        fresh_nodes.add(new_node);

        if (d == 0 && !node.end_cap) {
          add_to_nexts(node.nexts, new_node);
        } else {
          sequence_crdt.break_node(node, s[0] - offset, undefined, new_node);
        }
        si++;
        return;
      }

      // Start of a deletion (possibly with replacement text).
      if (delete_up_to <= offset) {
        var d = s[0] - (offset + node.elems.length);

        // Whether the replacement text should be appended after this node
        // rather than inserted by splitting it.
        let add_at_end =
          d == 0 &&
          s[2] &&
          node.end_cap &&
          !has_nexts &&
          (node.next && node.next.elems.length) &&
          !Object.keys(node.next.deleted_by).some((version) => f(version));

        if (d > 0 || (d == 0 && !add_at_end)) return;

        delete_up_to = s[0] + s[1];

        if (s[2]) {
          var new_node = sequence_crdt.create_node(
            version,
            s[2],
            null,
            sort_key
          );

          fresh_nodes.add(new_node);

          if (add_at_end) {
            add_to_nexts(node.nexts, new_node);
          } else {
            sequence_crdt.break_node(node, s[0] - offset, true, new_node);
          }
          return;
        } else {
          if (s[0] == offset) {
            // Deletion starts exactly at this node: fall through to the
            // deletion-marking branch below.
          } else {
            // Split so the deletion begins on its own node; it gets marked
            // when the traversal visits that node.
            sequence_crdt.break_node(node, s[0] - offset);
            return;
          }
        }
      }

      // Node lies inside the active deletion range: mark it deleted, first
      // splitting off any surviving tail beyond the range's end.
      if (delete_up_to > offset) {
        if (delete_up_to <= offset + node.elems.length) {
          if (delete_up_to < offset + node.elems.length) {
            sequence_crdt.break_node(node, delete_up_to - offset);
          }
          si++;
        }
        node.deleted_by[version] = true;
        return;
      }
    };

    var f = is_anc || (() => true);
    var offset = 0;        // position counted over nodes visible under is_anc
    var rebase_offset = 0; // position counted over all non-deleted nodes
    let fresh_nodes = new Set(); // nodes created by this call
    function traverse(node, prev, version) {
      if (!version || f(version)) {
        var has_nexts = node.nexts.find((next) => f(next.version));
        var deleted = Object.keys(node.deleted_by).some((version) =>
          f(version)
        );
        let rebase_deleted = Object.keys(node.deleted_by).length;
        process_patch(node, offset, has_nexts, prev, version, deleted);

        if (!deleted) offset += node.elems.length;
        // If process_patch just marked this node deleted, record that as a
        // rebased deletion splice.
        if (!rebase_deleted && Object.keys(node.deleted_by).length)
          rebased_splices.push([rebase_offset, node.elems.length, ""]);
      }
      // Record insertions for nodes this call created.
      if (fresh_nodes.has(node))
        rebased_splices.push([rebase_offset, 0, node.elems]);
      if (!Object.keys(node.deleted_by).length)
        rebase_offset += node.elems.length;

      for (var next of node.nexts) traverse(next, null, next.version);
      if (node.next) traverse(node.next, node, version);
    }
    traverse(S, null, S.version);

    return rebased_splices;
  };

  /// # sequence_crdt.traverse(root_node, is_anc, callback, [view_deleted, tail_callback])
  /// 
  /// Traverses the subset of nodes in the tree rooted at `root_node` whose versions return `true` when passed to `is_anc`. For each node, `callback` is called with these parameters: `node, offset, has_nexts, prev, version, deleted`, 
  /// 
  /// Where
  /// - `node` is the current node being traversed
  /// - `offset` says how many elements we have passed so far 
  /// - `has_nexts` is true if some of this node's `nexts` will be traversed according to `is_anc`
  /// - `prev` is a pointer to the node whose `next` points to this one, or `null` if this is the root node
  /// - `version` is the version of this node, or this node's `prev` if our version is `null`, or that node's `prev` if it is also `null`, etc
  /// - `deleted` is true if this node is deleted according to `is_anc`
  /// 
  /// Usually we skip deleted nodes when traversing, but we'll include them if `view_deleted` is `true`. 
  /// 
  /// `tail_callback` is an optional callback that will get called with a single parameter `node` after all of that node's children `nexts` and `next` have been traversed. 
  /// 
  /// Returning `false` from `callback` aborts the traversal early.
  /// 
  /// ``` js
  /// sequence_crdt.traverse(node, () => true, node =>
  ///   process.stdout.write(node.elems)) 
  /// ```
  sequence_crdt.traverse = (S, f, cb, view_deleted, tail_cb) => {
    var pos = 0;
    // walk returns a truthy value when the traversal should stop, i.e. the
    // callback returned something loosely equal to false.
    function walk(node, prev, version) {
      var next_in_scope = node.nexts.find((n) => f(n.version));
      var is_deleted = Object.keys(node.deleted_by).some((v) => f(v));
      if (!is_deleted || view_deleted) {
        if (cb(node, pos, next_in_scope, prev, version, is_deleted) == false)
          return true;
        pos += node.elems.length;
      }
      // Visit branches first, in order, skipping ones outside is_anc.
      for (var branch of node.nexts) {
        if (!f(branch.version)) continue;
        if (walk(branch, null, branch.version)) return true;
      }
      // Then continue down the linear `next` chain; a node with no `next`
      // is a tail, which gets reported to tail_cb if provided.
      if (!node.next) {
        if (tail_cb) tail_cb(node);
        return;
      }
      return walk(node.next, node, version);
    }
    walk(S, null, S.version);
  };

  // modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript
  //
  // Returns the index of an element for which compare_fn reports 0, or, when
  // none matches, the index where such an element would be inserted to keep
  // the array ordered. compare_fn(x) returns a negative number when the
  // target sorts before x, a positive number when after, and 0 on a match.
  function binarySearch(ar, compare_fn) {
    var lo = 0;
    var hi = ar.length - 1;
    while (lo <= hi) {
      var mid = (lo + hi) >> 1;
      var order = compare_fn(ar[mid]);
      if (order > 0) lo = mid + 1;
      else if (order < 0) hi = mid - 1;
      else return mid;
    }
    return lo;
  }
})();

// CommonJS export (skipped in the browser, where `module` is undefined).
if (typeof module != "undefined") {
  module.exports = { create_antimatter_crdt, create_json_crdt, sequence_crdt };
}


================================================
FILE: antimatter_ts/doc.html
================================================
<head>
<link rel="stylesheet" href="https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css">
</head>

<script src="https://unpkg.com/marked@4.0.5"></script>

<script src="https://unpkg.com/@highlightjs/cdn-assets@11.1.0/highlight.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.1.0/languages/javascript.min.js"></script>

<body></body>
<script>

;(async () => {
    // Fetch the published antimatter source and its readme; each readme
    // section is rendered next to the code section it documents.
    let x = await fetch(`https://braid-org.github.io/braidjs/antimatter/antimatter.js`)
    x = await x.text()
    let code = x

    x = await fetch(`https://braid-org.github.io/braidjs/antimatter/readme.md`)
    x = await x.text()
    let md = x

    // Split the code at each method definition or command handler. The
    // regex uses only non-capturing groups, so the replace callback's second
    // argument is the match offset; replace() is used purely to iterate
    // matches (its return value is discarded).
    let code_blocks = []
    if (true) {
        let agg = code_blocks = []
        let prev_o = 0
        code.replace(/^[\t ]*(?:antimatter|self|json|sequence)\.(?:.*?) = (?:.*?)=> \(?\{\n|^[\t ]*(?:\} else )?if \(cmd == (?:.*?)\) \{\n/gm, (_0, o) => {
            agg.push(code.slice(prev_o, o))
            prev_o = o
        })
        agg.push(code.slice(prev_o))
    }
    code_blocks = code_blocks.filter(x => x)

    // Split the readme at each section heading, the same way.
    let md_blocks = []
    if (true) {
        let agg = md_blocks = []
        let prev_o = 0
        md.replace(/^(?:# antimatter|# json|# sequence|## message)/gm, (_0, o) => {
            agg.push(md.slice(prev_o, o))
            prev_o = o
        })
        agg.push(md.slice(prev_o))
    }
    md_blocks = md_blocks.filter(x => x)

    // Render one markdown section into a half-width div.
    // NOTE(review): with 100% lightness the random hue always renders as
    // white — presumably a leftover debugging aid; confirm before removing.
    function make_md(s) {
        let d = make_html(`<div style="background:hsl(${Math.random() * 360}, 100%, 100%);width:50%"></div>`)
        d.innerHTML = marked.parse(s)
        return d
    }

    // Render one code section with syntax highlighting into a half-width pane.
    function make_code(s) {
        let vv = hljs.highlight(s, {language: 'javascript'}).value
        let d = make_html(`<pre style="margin:0px;background:hsl(${Math.random() * 360}, 100%, 100%);width:50%">${vv}</pre>`)
        return d
    }

    // Pair readme sections with code sections, one flex row per pair.
    while (md_blocks.length) {
        let left = md_blocks.shift()
        let right = code_blocks.shift()

        let d = make_html(`<div style="border-top:1px solid black;display:flex;align-items: start;"></div>`)
        d.append(make_md(left))
        d.append(make_code(right))
        document.body.append(d)
    }
})()

// Parse an HTML snippet and return its first top-level element.
function make_html(s) {
    let host = document.createElement('div')
    host.innerHTML = s
    return host.firstChild
}

</script>


================================================
FILE: antimatter_ts/package.json
================================================
{
  "name": "@braidjs/antimatter",
  "version": "0.0.23",
  "description": "antimatter: a pruning algorithm for CRDTs and other mergeables",
  "main": "antimatter.js",
  "scripts": {
    "test": "node test.js"
  },
  "author": "Braid Working Group",
  "repository": "braid-org/braidjs",
  "homepage": "https://braid.org/antimatter",
  "packageManager": "pnpm@9.0.4+sha256.caa915eaae9d9aefccf50ee8aeda25a2f8684d8f9d5c6e367eaf176d97c1f89e",
  "dependencies": {
    "typescript": "^5.6.2"
  }
}


================================================
FILE: antimatter_ts/random002.js
================================================

// the next two functions added by me

// Build a deterministic random-number generator from an optional seed.
// A string seed feeds the twister its character codes via init_by_array,
// a numeric seed is used directly, and no seed falls back to the
// MersenneTwister default (time-based, not reproducible).
function create_rand(seed) {
  var twister
  if (typeof seed == 'string') {
    twister = new MersenneTwister(0)
    var codes = []
    for (var i = 0; i < seed.length; i++) codes[i] = seed.charCodeAt(i)
    twister.init_by_array(codes, codes.length)
  } else if (typeof seed == 'number') {
    twister = new MersenneTwister(seed)
  } else {
    twister = new MersenneTwister()
  }
  return () => twister.random()
}

// Globally replace Math.random with a seeded, reproducible generator.
// Note: this mutates the Math global for the whole program.
Math.randomSeed = function (seed) {
  var rand = create_rand(seed)
  Math.random = function () { return rand() }
}

/* The following piece of code is an implementation of MersenneTwister object
   taken from https://gist.github.com/banksean/300494, with one method 
   xor_array(array, size) added.
*/

/*
  I've wrapped Makoto Matsumoto and Takuji Nishimura's code in a namespace
  so it's better encapsulated. Now you can have multiple random number generators
  and they won't stomp all over eachother's state.
  
  If you want to use this as a substitute for Math.random(), use the random()
  method like so:
  
  var m = new MersenneTwister();
  var randomNumber = m.random();
  
  You can also call the other genrand_{foo}() methods on the instance.

  If you want to use a specific seed in order to get a repeatable random
  sequence, pass an integer into the constructor:

  var m = new MersenneTwister(123);

  and that will always produce the same random sequence.

  Sean McCullough (banksean@gmail.com)
*/

/* 
   A C-program for MT19937, with initialization improved 2002/1/26.
   Coded by Takuji Nishimura and Makoto Matsumoto.
 
   Before using, initialize the state by using init_genrand(seed)  
   or init_by_array(init_key, key_length).
 
   Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,
   All rights reserved.                          
 
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:
 
     1. Redistributions of source code must retain the above copyright
        notice, this list of conditions and the following disclaimer.
 
     2. Redistributions in binary form must reproduce the above copyright
        notice, this list of conditions and the following disclaimer in the
        documentation and/or other materials provided with the distribution.
 
     3. The names of its contributors may not be used to endorse or promote 
        products derived from this software without specific prior written 
        permission.
 
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
   A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 
   Any feedback is very welcome.
   http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html
   email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)
*/

// Constructs an MT19937 generator. Without a seed, the current time in
// milliseconds is used, so unseeded instances are not reproducible.
var MersenneTwister = function(seed) {
  if (seed == undefined) {
    seed = new Date().getTime();
  } 
  /* Period parameters */  
  this.N = 624;
  this.M = 397;
  this.MATRIX_A = 0x9908b0df;   /* constant vector a */
  this.UPPER_MASK = 0x80000000; /* most significant w-r bits */
  this.LOWER_MASK = 0x7fffffff; /* least significant r bits */
 
  this.mt = new Array(this.N); /* the array for the state vector */
  this.mti=this.N+1; /* mti==N+1 means mt[N] is not initialized */

  this.init_genrand(seed);
}  
 
/* initializes mt[N] with a seed */
// Fills the 624-word state vector from a single 32-bit seed using the 2002
// initialization recurrence.
MersenneTwister.prototype.init_genrand = function(s) {
  this.mt[0] = s >>> 0;
  for (this.mti=1; this.mti<this.N; this.mti++) {
      // `var s` re-declares the parameter; harmless, since mt[0] is set above.
      var s = this.mt[this.mti-1] ^ (this.mt[this.mti-1] >>> 30);
   this.mt[this.mti] = (((((s & 0xffff0000) >>> 16) * 1812433253) << 16) + (s & 0x0000ffff) * 1812433253)
  + this.mti;
      /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */
      /* In the previous versions, MSBs of the seed affect   */
      /* only MSBs of the array mt[].                        */
      /* 2002/01/09 modified by Makoto Matsumoto             */
      this.mt[this.mti] >>>= 0;
      /* for >32 bit machines */
  }
}
 
/* initialize by an array with array-length */
/* init_key is the array for initializing keys */
/* key_length is its length */
/* slight change for C++, 2004/2/26 */
// Seeds the state from an arbitrary-length key array, mixing each key word
// into the state non-linearly; used here for string seeds (see create_rand).
MersenneTwister.prototype.init_by_array = function(init_key, key_length) {
  var i, j, k;
  this.init_genrand(19650218);
  i=1; j=0;
  k = (this.N>key_length ? this.N : key_length);
  for (; k; k--) {
    var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30)
    this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1664525) << 16) + ((s & 0x0000ffff) * 1664525)))
      + init_key[j] + j; /* non linear */
    this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */
    i++; j++;
    if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }
    if (j>=key_length) j=0;
  }
  for (k=this.N-1; k; k--) {
    var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30);
    this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1566083941) << 16) + (s & 0x0000ffff) * 1566083941))
      - i; /* non linear */
    this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */
    i++;
    if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }
  }

  this.mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */ 
}
 
/* XORs the mt array with a given array xor_key of length key_length */
MersenneTwister.prototype.xor_array = function(xor_key, key_length) {
  // Fold the key into every state word, cycling through the key as needed,
  // and keep each word an unsigned 32-bit value.
  var k = 0;
  for (var i = 0; i < this.N; i++) {
    this.mt[i] = (this.mt[i] ^ xor_key[k]) >>> 0;
    k = (k + 1) % key_length;
  }
}

/* generates a random number on [0,0xffffffff]-interval */
// Core MT19937 generator: refills the full block of N state words whenever
// the cursor runs past the end, then returns the next word after tempering.
MersenneTwister.prototype.genrand_int32 = function() {
  var y;
  var mag01 = new Array(0x0, this.MATRIX_A);
  /* mag01[x] = x * MATRIX_A  for x=0,1 */

  if (this.mti >= this.N) { /* generate N words at one time */
    var kk;

    if (this.mti == this.N+1)   /* if init_genrand() has not been called, */
      this.init_genrand(5489); /* a default initial seed is used */

    for (kk=0;kk<this.N-this.M;kk++) {
      y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);
      this.mt[kk] = this.mt[kk+this.M] ^ (y >>> 1) ^ mag01[y & 0x1];
    }
    for (;kk<this.N-1;kk++) {
      y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);
      this.mt[kk] = this.mt[kk+(this.M-this.N)] ^ (y >>> 1) ^ mag01[y & 0x1];
    }
    y = (this.mt[this.N-1]&this.UPPER_MASK)|(this.mt[0]&this.LOWER_MASK);
    this.mt[this.N-1] = this.mt[this.M-1] ^ (y >>> 1) ^ mag01[y & 0x1];

    this.mti = 0;
  }

  y = this.mt[this.mti++];

  /* Tempering */
  y ^= (y >>> 11);
  y ^= (y << 7) & 0x9d2c5680;
  y ^= (y << 15) & 0xefc60000;
  y ^= (y >>> 18);

  return y >>> 0;
}
 
/* generates a random number on [0,0x7fffffff]-interval */
MersenneTwister.prototype.genrand_int31 = function() {
  // Drop the sign bit of a 32-bit draw to stay non-negative.
  return this.genrand_int32() >>> 1;
}

/* generates a random number on [0,1]-real-interval */
MersenneTwister.prototype.genrand_real1 = function() {
  // Divide by 2^32 - 1 so both endpoints are reachable.
  return this.genrand_int32() * (1.0 / 4294967295.0);
}

/* generates a random number on [0,1)-real-interval */
MersenneTwister.prototype.random = function() {
  // Divide by 2^32, so 1.0 itself can never be produced.
  return this.genrand_int32() * (1.0 / 4294967296.0);
}

/* generates a random number on (0,1)-real-interval */
MersenneTwister.prototype.genrand_real3 = function() {
  // Offset by half a step so 0 is excluded as well.
  return (this.genrand_int32() + 0.5) * (1.0 / 4294967296.0);
}

/* generates a random number on [0,1) with 53-bit resolution*/
MersenneTwister.prototype.genrand_res53 = function() {
  // Combine 27 high bits and 26 low bits into a 53-bit mantissa.
  var hi = this.genrand_int32() >>> 5;
  var lo = this.genrand_int32() >>> 6;
  return (hi * 67108864.0 + lo) * (1.0 / 9007199254740992.0);
}

/* These real versions are due to Isaku Wada, 2002/01/09 added */

================================================
FILE: antimatter_ts/readme.md
================================================
# antimatter: an algorithm that prunes CRDT/OT history

[Antimatter](https://braid.org/antimatter) is the world's first peer-to-peer synchronization algorithm that can prune its history in a network where peers disconnect, reconnect, and merge offline edits.  Antimatter supports arbitrary simultaneous edits, from arbitrary peers, under arbitrary network delays and partitions, and guarantees full CRDT/OT consistency, while pruning unnecessary history within each partitioned subnet, and across subnets once they reconnect.  In steady state, it prunes down to zero overhead.  This lets you put synchronizing data structures in more parts of your software, without worrying about memory overhead.

This package implements an antimatter peer composed of three objects:

```js
var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter')
```

- *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.
- *json_crdt*: created using `create_json_crdt`, this object is a pruneable JSON CRDT — "JSON" meaning it represents an arbitrary JSON data structure, and "CRDT" and "pruneable" having the same meaning as for sequence_crdt below. The json_crdt makes recursive use of sequence_crdt structures to represent arbitrary JSON (for instance, a map is represented with a sequence_crdt structure for each value, where the first element in the sequence is the value).
- *sequence_crdt*: methods to manipulate a pruneable sequence CRDT — "sequence" meaning it represents a javascript string or array, "CRDT" meaning this structure can be merged with other ones, and "pruneable" meaning that it supports an operation to remove meta-data when it is no longer needed (whereas CRDT's often keep track of this meta-data forever).

The Antimatter Algorithm was invented by Michael Toomim and Greg Little in the
[Braid Project](https://braid.org) of [Invisible College](https://invisible.college/).

[Click here to see more details, and the API side-by-side with the source code.](https://braid.org/antimatter)


================================================
FILE: antimatter_ts/src/antimatter_crdt.ts
================================================
/// # Software Architecture
/// The software is architected into three objects:
///
/// ``` js
/// let {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') 
/// ```

import { create_json_crdt } from "./json_crdt.ts";

// v522

/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.
export let create_antimatter_crdt;

/// # create_antimatter_crdt(send[, init])
///
/// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).
///
/// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a
///   message over a connection registered with `get` or `connect`. The sole
///   parameter to this function is a JSONafiable object that hopes to be passed to
///   the `receive` method on the antimatter_crdt object at the other end of the
///   connection specified in the `conn` key.
/// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)
/// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)
/// * `clear_timeout`: function that takes a timeout identifier and cancels it (e.g. wrapping the javascript clearTimeout)
/// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. 
/// ``` js
/// let antimatter_crdt = create_antimatter_crdt(msg => {
///     websockets[msg.conn].send(JSON.stringify(msg))
///   },
///   () => Date.now(),
///   (func, t) => setTimeout(func, t),
///   (t) => clearTimeout(t),
///   JSON.parse(fs.readFileSync('./antimatter.backup'))
/// )
/// ```
create_antimatter_crdt = (
  send,
  get_time,
  set_timeout,
  clear_timeout,
  self
) => {
  // Build the underlying json_crdt first, then layer antimatter state on top.
  // Each field keeps its existing value when `self` was deserialized from a
  // JSON backup, and falls back to a fresh default otherwise.
  self = create_json_crdt(self);
  self.send = send;

  if (!self.id) self.id = Math.random().toString(36).slice(2);
  if (!self.next_seq) self.next_seq = 0;

  // Connection bookkeeping: active conns, handshaking conns, and a counter
  // used to order fissures by when their connection was established.
  if (!self.conns) self.conns = {};
  if (!self.proto_conns) self.proto_conns = {};
  if (!self.conn_count) self.conn_count = 0;

  if (!self.fissures) self.fissures = {};
  if (!self.acked_boundary) self.acked_boundary = {};
  if (!self.marcos) self.marcos = {};
  if (!self.forget_cbs) self.forget_cbs = {};

  // Maps each version-id to the sorted array of ids it has been grouped with.
  if (!self.version_groups) self.version_groups = {};

  // Marco (keep-alive / ack round) timing state.
  if (!self.marco_map) self.marco_map = {};
  if (!self.marco_time_est_1) self.marco_time_est_1 = 1000;
  if (!self.marco_time_est_2) self.marco_time_est_2 = 1000;
  if (!self.marco_current_wait_time) self.marco_current_wait_time = 1000;
  // NOTE(review): unconditional, unlike the fields above — this resets to 1 on
  // every (re)construction, even when restoring from backup; presumably
  // intentional, but worth confirming.
  self.marco_increases_allowed = 1;
  if (!self.marco_timeout) self.marco_timeout = null;

  // Merges the given version-ids (plus any groups they already belong to)
  // into one canonical group: a sorted, de-duplicated array of member ids,
  // registered in self.version_groups under every member. Returns the group.
  function raw_add_version_group(version_array) {
    let members = {};
    for (let version of version_array) {
      if (members[version]) continue;
      members[version] = true;
      // Absorb any group this version was already part of.
      let existing = self.version_groups[version];
      if (existing) for (let m of existing) members[m] = true;
    }
    let group = Object.keys(members).sort();
    for (let m of group) self.version_groups[m] = group;
    return group;
  }

  // Scans the version DAG (self.T: version-id -> map of parent ids) for
  // "complete bipartite" junctions: a group of parent versions and a group of
  // child versions where every child has exactly that parent group as its
  // parents, and every parent has exactly that child group as its children.
  //
  // * `children`: map from version-id to the map of its child version-ids.
  //
  // Returns { parent_sets, child_sets }: each maps every member version-id to
  // a shared container { members }. Only sets with 2+ members are recorded
  // (see the early return in add_set_to_sets). self.current_version is always
  // registered as a parent set.
  function get_parent_and_child_sets(children) {
    let parent_sets = {};
    let child_sets = {};
    let done = {};
    // Registers every member of set `s` in `sets`, pointing at one shared
    // container; sets smaller than 2 members are not worth recording.
    function add_set_to_sets(s, sets, mark_done) {
      let container = { members: s };
      let array = Object.keys(s);
      if (array.length < 2) return;
      for (let v of array) {
        sets[v] = container;
        if (mark_done) done[v] = true;
      }
    }
    add_set_to_sets(self.current_version, parent_sets, true);
    for (let v of Object.keys(self.T)) {
      if (done[v]) continue;
      done[v] = true;
      if (!children[v]) continue;
      // Candidate junction: v's children, and the parent set of the first of
      // those children. The checks below verify the junction is complete.
      let first_child_set = children[v];
      let first_child_array = Object.keys(first_child_set);
      let first_parent_set = self.T[first_child_array[0]];
      let first_parent_array = Object.keys(first_parent_set);
      if (
        // Every child must have exactly the same parent set...
        first_child_array.every((child) => {
          let parent_set = self.T[child];
          let parent_array = Object.keys(parent_set);
          return (
            parent_array.length == first_parent_array.length &&
            parent_array.every((parent) => first_parent_set[parent])
          );
        }) &&
        // ...and every parent must have exactly the same child set.
        first_parent_array.every((parent) => {
          let child_set = children[parent];
          let child_array = Object.keys(child_set);
          return (
            child_array.length == first_child_array.length &&
            child_array.every((child) => first_child_set[child])
          );
        })
      ) {
        add_set_to_sets(first_parent_set, parent_sets, true);
        add_set_to_sets(first_child_set, child_sets);
      }
    }
    return { parent_sets, child_sets };
  }

  // Searches upward from `bottom` for a "bubble": a frontier of version-ids
  // that, once reached, leaves nothing pending — meaning the history between
  // it and `bottom` could be collapsed into a single version.
  //
  // * `bottom`: map of version-ids forming the lower frontier of the search.
  // * `children`: map from version-id to the map of its child version-ids.
  // * `child_sets`: sibling-set containers from get_parent_and_child_sets;
  //   lets a whole sibling group serve as a multi-version top.
  // * `restricted`: (optional) map of version-ids the walk must not cross;
  //   when present we keep walking and return the last valid top found,
  //   rather than the first.
  //
  // Returns a map of version-ids (the bubble's top) or null; throws "bad"
  // if the walk escapes self.T while unrestricted.
  // NOTE(review): throws a bare string, not an Error — kept as-is since
  // callers may match on it.
  function find_one_bubble(bottom, children, child_sets, restricted) {
    let expecting = { ...bottom };
    let seen = {};
    // Children of the bottom versions count as already consumed.
    Object.keys(bottom).forEach(
      (v) =>
        children[v] &&
        Object.keys(children[v]).forEach((v) => (seen[v] = true))
    );
    let q = Object.keys(expecting);
    let last_top = null;
    while (q.length) {
      // Fix: `cur` was assigned without a declaration, which throws a
      // ReferenceError in strict mode (this file is an ES module).
      let cur = q.shift();
      if (!self.T[cur]) {
        if (!restricted) throw "bad";
        else return last_top;
      }
      if (restricted && restricted[cur]) return last_top;

      if (seen[cur]) continue;

      // Only consume a version once all of its children have been consumed.
      if (children[cur] && !Object.keys(children[cur]).every((c) => seen[c]))
        continue;
      seen[cur] = true;
      delete expecting[cur];

      // Nothing left pending: this single version tops the bubble.
      if (!Object.keys(expecting).length) {
        last_top = { [cur]: true };
        if (!restricted) return last_top;
      }

      Object.keys(self.T[cur]).forEach((p) => {
        expecting[p] = true;
        q.push(p);
      });

      // A fully-seen sibling set whose shared parents are exactly what we
      // are still expecting can also serve as a (multi-version) top.
      if (
        child_sets[cur] &&
        Object.keys(child_sets[cur].members).every((v) => seen[v])
      ) {
        let expecting_array = Object.keys(expecting);
        let parent_set = self.T[cur];
        let parent_array = Object.keys(parent_set);
        if (
          expecting_array.length == parent_array.length &&
          expecting_array.every((v) => parent_set[v])
        ) {
          last_top = child_sets[cur].members;
          if (!restricted) return last_top;
        }
      }
    }
    return last_top;
  }

  // Registers `version_array` as a version group, and if any of its members
  // already exist in the DAG, collapses the bubble between them and the top
  // found by find_one_bubble. Returns the group's canonical (first) id.
  function add_version_group(version_array) {
    let group = raw_add_version_group(version_array);
    // None of these versions exist in self.T yet: nothing to collapse.
    if (!version_array.some((v) => self.T[v])) return group[0];

    let children = self.get_child_map();
    let { child_sets } = get_parent_and_child_sets(children);

    let bottom = {};
    for (let v of group) if (self.T[v]) bottom[v] = true;

    let top = find_one_bubble(bottom, children, child_sets);
    let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];

    // Everything from the top down through the bottom's ancestry collapses
    // into `bubble`. Walk ancestors iteratively; stop at already-marked ids
    // (which includes the top, so we never walk past it).
    let to_bubble = {};
    for (let v of Object.keys(top)) to_bubble[v] = bubble;
    let stack = Object.keys(bottom);
    while (stack.length) {
      let v = stack.pop();
      if (to_bubble[v]) continue;
      to_bubble[v] = bubble;
      stack.push(...Object.keys(self.T[v]));
    }

    self.apply_bubbles(to_bubble);
    return group[0];
  }

  // Wrap the user-supplied `send` so outgoing messages speak in full version
  // groups: a version-id that belongs to a group is replaced by the whole
  // group, and parent maps are widened to include every group member.
  let orig_send = send;
  send = (msg) => {
    if (self.version_groups[msg.version])
      msg.version = self.version_groups[msg.version];

    if (msg.parents) {
      // Copy before widening so we never mutate the caller's object.
      msg.parents = { ...msg.parents };
      for (let p of Object.keys(msg.parents)) {
        let group = self.version_groups[p];
        if (group) for (let member of group) msg.parents[member] = true;
      }
    }

    if (Array.isArray(msg.versions)) {
      // Deep-copy: each entry's version and parents may be rewritten below.
      msg.versions = JSON.parse(JSON.stringify(msg.versions));
      for (let v of msg.versions) {
        if (self.version_groups[v.version])
          v.version = self.version_groups[v.version];
      }
      for (let v of msg.versions) {
        for (let p of Object.keys(v.parents)) {
          let group = self.version_groups[p];
          if (group) for (let member of group) v.parents[member] = true;
        }
      }
    }

    orig_send(msg);
  };

  /// # antimatter_crdt.receive(message)
  ///
  /// Let this antimatter object "receive" a message from another antimatter object, presumably from its `send` callback.
  /// ``` js
  /// websocket.on('message', data => {
  ///     antimatter_crdt.receive(JSON.parse(data)) });
  /// ```
  /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `cmd` entry. Note that each object also
  ///   contains a `conn` entry with the id of the connection the message is sent
  ///   over.
  self.receive = (x) => {
    let {
      cmd,
      version,
      parents,
      patches,
      versions,
      fissure,
      fissures,
      seen,
      forget,
      marco,
      peer,
      conn,
    } = x;

    if (version && typeof version != "string") {
      if (!self.T[version[0]]) version = add_version_group(version);
      else version = version[0];
    }
    if (parents) {
      parents = { ...parents };
      Object.keys(parents).forEach((v) => {
        if (self.version_groups[v] && self.version_groups[v][0] != v)
          delete parents[v];
      });
    }

    if (versions && versions.forEach) versions.forEach((v) => {
      if (typeof v.version != "string") {
        if (!self.T[v.version[0]]) v.version = add_version_group(v.version);
        else v.version = v.version[0];
      }
      v.parents = { ...v.parents };
      Object.keys(v.parents).forEach((vv) => {
        if (self.version_groups[vv] && self.version_groups[vv][0] != vv)
          delete v.parents[vv];
      });
    });

    let marco_versions_array = version
      ? [version]
      : versions && !Array.isArray(versions)
        ? Object.keys(versions).sort()
        : null;
    let marco_versions =
      marco_versions_array &&
      Object.fromEntries(marco_versions_array.map((v) => [v, true]));

    if (versions && !Array.isArray(versions)) {
      versions = { ...versions };
      Object.keys(versions).forEach((v) => {
        if (self.version_groups[v] && self.version_groups[v][0] != v)
          delete versions[v];
      });
      if (!Object.keys(versions).length) return;
    }

    /// ## message `get`
    /// `get` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.
    /// ``` js
    /// { cmd: 'get',
    ///   peer: 'SENDER_ID',
    ///   conn: 'CONN_ID',
    ///   parents: {'PARENT_VERSION_ID': true, ...} }
    /// ```
    /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.
    if (cmd == "get" || (cmd == "welcome" && peer != null)) {
      if (self.conns[conn] != null) throw Error("bad");
      self.conns[conn] = { peer, seq: ++self.conn_count };
    }

    /// ## message `fissure`
    ///
    /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). It is also possible to send multiple `fissures` in an array.
    /// ``` js
    /// { cmd: 'fissure',
    ///   fissure: { // or fissures: [{...}, {...}, ...],
    ///     a: 'PEER_A_ID',
    ///     b:  'PEER_B_ID',
    ///     conn: 'CONN_ID',
    ///     versions: {'VERSION_ID': true, ...},
    ///     time: Date.now()
    ///   },
    ///   conn: 'CONN_ID' }
    /// ```
    /// Note that `time` isn't used for anything critical, as it's just wallclock time.
    if (fissure) fissures = [fissure];

    if (fissures) fissures.forEach((f) => (f.t = self.conn_count));

    if (versions && (cmd == "set" || cmd == "welcome"))
      versions = Object.fromEntries(versions.map((v) => [v.version, v]));
    if (version) versions = { [version]: true };

    let rebased_patches = [];

    let fissures_back = [];
    let fissures_forward = [];
    let fissures_done = {};

    // Deep-copies an array of fissure records, dropping the local-only `t`
    // (connection-sequence) field so it never goes out on the wire.
    function copy_fissures(fs) {
      let copies = [];
      for (let f of fs) {
        let copy = JSON.parse(JSON.stringify(f));
        delete copy.t;
        copies.push(copy);
      }
      return copies;
    }

    if (fissures) {
      let fiss_map = Object.fromEntries(
        fissures.map((f) => [f.a + ":" + f.b + ":" + f.conn, f])
      );
      for (let [key, f] of Object.entries(fiss_map)) {
        if (fissures_done[f.conn]) continue;
        fissures_done[f.conn] = true;

        let our_f = self.fissures[key];
        let other_key = f.b + ":" + f.a + ":" + f.conn;
        let their_other = fiss_map[other_key];
        let our_other = self.fissures[other_key];

        if (!our_f) self.fissures[key] = f;
        if (their_other && !our_other) self.fissures[other_key] = their_other;

        if (!their_other && !our_other && f.b == self.id) {
          if (self.conns[f.conn]) delete self.conns[f.conn];
          our_other = self.fissures[other_key] = {
            ...f,
            a: f.b,
            b: f.a,
            t: self.conn_count,
          };
        }

        if (!their_other && our_other) {
          fissures_back.push(f);
          fissures_back.push(our_other);
        }

        if (!our_f || (their_other && !our_other)) {
          fissures_forward.push(f);
          if (their_other || our_other)
            fissures_forward.push(their_other || our_other);
        }
      }
    }

    /// ## message `welcome`
    /// Sent in response to a `get`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `get` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.
    ///
    /// ``` js
    /// {
    ///   cmd: 'welcome',
    ///   versions: [
    ///     //each version looks like a set message...
    ///   ],
    ///   fissures: [
    ///     //each fissure looks as it would in a fissure message...
    ///   ],
    ///   parents: 
    ///     {
    ///       //versions you must have before consuming these new versions
    ///       'PARENT_VERSION_ID': true,
    ///       ...
    ///     },
    ///   [peer: 'SENDER_ID'], // if responding to a get
    ///   conn: 'CONN_ID'
    /// } 
    /// ```
    let _T = {};
    let added_versions = [];
    if (cmd == "welcome") {
      let versions_to_add = {};
      let vs = Object.values(versions);
      vs.forEach((v) => (versions_to_add[v.version] = v.parents));
      vs.forEach((v) => {
        if (
          self.T[v.version] ||
          (self.version_groups[v.version] &&
            self.version_groups[v.version][0] != v.version)
        ) {
          remove_ancestors(v.version);
          // Drops `v` and, recursively, all of its ancestors from the
          // closed-over `versions_to_add` map: we already have this version
          // (or an equivalent group member), so its ancestry need not be
          // re-added either.
          function remove_ancestors(v) {
            if (versions_to_add[v]) {
              Object.keys(versions_to_add[v]).forEach(remove_ancestors);
              delete versions_to_add[v];
            }
          }
        }
      });

      for (let v of vs) _T[v.version] = v.parents;

      l1: for (let v of vs) {
        if (versions_to_add[v.version]) {
          let ps = Object.keys(v.parents);

          if (!ps.length && Object.keys(self.T).length) continue;
          for (p of ps) if (!self.T[p]) continue l1;

          rebased_patches = rebased_patches.concat(
            self.add_version(v.version, v.parents, v.patches, v.sort_keys)
          );

          added_versions.push(v);
          delete _T[v.version];
        }
      }
    }

    if (cmd == "get" || (cmd == "welcome" && peer != null)) {
      let fissures_back = Object.values(self.fissures);

      if (cmd == "welcome") {
        let leaves = { ..._T };
        Object.keys(_T).forEach((v) => {
          Object.keys(_T[v]).forEach((p) => delete leaves[p]);
        });

        let f = {
          a: self.id,
          b: peer,
          conn: "-" + conn,
          versions: Object.fromEntries(
            added_versions
              .concat(Object.keys(leaves).map((v) => versions[v]))
              .map((v) => [v.version, true])
          ),
          time: get_time(),
       
Download .txt
gitextract_8cg5elin/

├── .gitignore
├── antimatter/
│   ├── antimatter.js
│   ├── doc.html
│   ├── package.json
│   ├── readme.md
│   └── test.html
├── antimatter_ts/
│   ├── antimatter.js
│   ├── doc.html
│   ├── package.json
│   ├── random002.js
│   ├── readme.md
│   ├── src/
│   │   ├── antimatter_crdt.ts
│   │   ├── json_crdt.ts
│   │   └── sequence_crdt.ts
│   ├── test.html
│   └── tsconfig.json
├── antimatter_wiki/
│   ├── client.html
│   ├── package.json
│   ├── readme.md
│   └── server.js
├── braid-http/
│   ├── braid-http-client.js
│   ├── braid-http-server.js
│   ├── contributing.md
│   ├── demos/
│   │   ├── blog/
│   │   │   ├── README
│   │   │   ├── certificate
│   │   │   ├── client.html
│   │   │   ├── package.json
│   │   │   ├── private-key
│   │   │   └── server.js
│   │   └── chat/
│   │       ├── README
│   │       ├── certificate
│   │       ├── client.html
│   │       ├── package.json
│   │       ├── private-key
│   │       └── server.js
│   ├── index.js
│   ├── index.mjs
│   ├── package.json
│   ├── package.md
│   ├── readme.md
│   └── test/
│       ├── client.html
│       ├── readme.md
│       ├── server.js
│       ├── test-request.txt
│       └── test-responses.txt
├── json-patch/
│   ├── apply-patch.js
│   ├── package.json
│   ├── readme.md
│   └── test.js
├── kernel/
│   ├── antimatter.js
│   ├── demos/
│   │   ├── simple/
│   │   │   ├── simple-client.html
│   │   │   └── simple-server.js
│   │   ├── sync9-chat/
│   │   │   ├── chat-server.js
│   │   │   ├── chat.css
│   │   │   ├── chat.html
│   │   │   ├── chat.js
│   │   │   ├── client.js
│   │   │   ├── mobile.css
│   │   │   ├── package.json
│   │   │   ├── settings.css
│   │   │   ├── settings.html
│   │   │   └── worker.js
│   │   └── wiki/
│   │       ├── wiki-client.html
│   │       └── wiki-server.js
│   ├── errors.js
│   ├── http-client.js
│   ├── http-server.js
│   ├── leadertab-shell.js
│   ├── llww.js
│   ├── node.js
│   ├── package.json
│   ├── pipe.js
│   ├── readme.md
│   ├── sqlite-store.js
│   ├── store.js
│   ├── test/
│   │   ├── tests.js
│   │   ├── virtual-p2p.js
│   │   ├── websocket-test.js
│   │   ├── wiki-perf.html
│   │   └── wiki-tester.js
│   ├── websocket-client.js
│   └── websocket-server.js
├── readme.md
├── simple_d_ton/
│   ├── index.js
│   └── package.json
├── simpleton/
│   ├── client.js
│   ├── demo.js
│   ├── index.js
│   ├── index.mjs
│   ├── package.json
│   └── server.js
├── sync9/
│   ├── old-vis/
│   │   ├── visualization.html
│   │   └── visualization.js
│   └── sync9.js
├── util/
│   ├── apply-patch.js
│   ├── braid-bundler.js
│   ├── diff.js
│   ├── require.js
│   └── utilities.js
└── yarnball/
    ├── server.js
    ├── yarnball.html
    └── yarnball.js
Download .txt
SYMBOL INDEX (354 symbols across 42 files)

FILE: antimatter/antimatter.js
  function raw_add_version_group (line 86) | function raw_add_version_group(version_array) {
  function get_parent_and_child_sets (line 98) | function get_parent_and_child_sets(children) {
  function find_one_bubble (line 145) | function find_one_bubble(bottom, children, child_sets, restricted) {
  function add_version_group (line 199) | function add_version_group(version_array) {
  function copy_fissures (line 367) | function copy_fissures(fs) {
  function remove_ancestors (line 448) | function remove_ancestors(v) {
  function check_ackme_count (line 646) | function check_ackme_count(ackme) {
  function add_full_ack_leaves (line 690) | function add_full_ack_leaves(ackme, conn) {
  function cancel_ackmes (line 851) | function cancel_ackmes() {
  function create_fissure (line 855) | function create_fissure(peer, conn) {
  function resolve_fissures (line 866) | function resolve_fissures() {
  function prune (line 895) | function prune(just_checking, t, just_versions) {
  function raw_read (line 1049) | function raw_read(x, is_anc) {
  function generate_update_message (line 1118) | function generate_update_message(version) {
  function recurse (line 1205) | function recurse(x) {
  function resolve_path (line 1433) | function resolve_path(parse) {
  function recurse (line 1539) | function recurse(version) {
  function recurse (line 1565) | function recurse(version) {
  function add_ins (line 1667) | function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
  function add_del (line 1685) | function add_del(offset, del, ins) {
  function helper (line 1697) | function helper(node, _version, end_cap, is_row_header) {
  function set_nnnext (line 1766) | function set_nnnext(node, next) {
  function do_line (line 1772) | function do_line(node, version) {
  function add_to_nexts (line 1931) | function add_to_nexts(nexts, to) {
  function traverse (line 2066) | function traverse(node, prev, version) {
  function helper (line 2114) | function helper(node, prev, version) {
  function binarySearch (line 2134) | function binarySearch(ar, compare_fn) {

FILE: antimatter_ts/antimatter.js
  function raw_add_version_group (line 84) | function raw_add_version_group(version_array) {
  function get_parent_and_child_sets (line 96) | function get_parent_and_child_sets(children) {
  function find_one_bubble (line 143) | function find_one_bubble(bottom, children, child_sets, restricted) {
  function add_version_group (line 197) | function add_version_group(version_array) {
  function copy_fissures (line 361) | function copy_fissures(fs) {
  function remove_ancestors (line 443) | function remove_ancestors(v) {
  function check_marco_count (line 641) | function check_marco_count(marco) {
  function add_full_ack_leaves (line 685) | function add_full_ack_leaves(marco, conn) {
  function cancel_marcos (line 847) | function cancel_marcos() {
  function create_fissure (line 851) | function create_fissure(peer, conn) {
  function resolve_fissures (line 862) | function resolve_fissures() {
  function prune (line 891) | function prune(just_checking, t, just_versions) {
  function raw_read (line 1046) | function raw_read(x, is_anc) {
  function generate_set_message (line 1115) | function generate_set_message(version) {
  function recurse (line 1202) | function recurse(x) {
  function resolve_path (line 1429) | function resolve_path(parse) {
  function recurse (line 1535) | function recurse(version) {
  function recurse (line 1561) | function recurse(version) {
  function add_ins (line 1662) | function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
  function add_del (line 1680) | function add_del(offset, del, ins) {
  function helper (line 1692) | function helper(node, _version, end_cap, is_row_header) {
  function set_nnnext (line 1761) | function set_nnnext(node, next) {
  function do_line (line 1767) | function do_line(node, version) {
  function add_to_nexts (line 1926) | function add_to_nexts(nexts, to) {
  function traverse (line 2061) | function traverse(node, prev, version) {
  function helper (line 2109) | function helper(node, prev, version) {
  function binarySearch (line 2129) | function binarySearch(ar, compare_fn) {

FILE: antimatter_ts/random002.js
  function create_rand (line 4) | function create_rand(seed) {

FILE: antimatter_ts/src/antimatter_crdt.ts
  function raw_add_version_group (line 69) | function raw_add_version_group(version_array) {
  function get_parent_and_child_sets (line 81) | function get_parent_and_child_sets(children) {
  function find_one_bubble (line 128) | function find_one_bubble(bottom, children, child_sets, restricted) {
  function add_version_group (line 182) | function add_version_group(version_array) {
  function copy_fissures (line 346) | function copy_fissures(fs) {
  function remove_ancestors (line 428) | function remove_ancestors(v) {
  function check_marco_count (line 626) | function check_marco_count(marco) {
  function add_full_ack_leaves (line 670) | function add_full_ack_leaves(marco, conn) {
  function cancel_marcos (line 832) | function cancel_marcos() {
  function create_fissure (line 836) | function create_fissure(peer, conn) {
  function resolve_fissures (line 847) | function resolve_fissures() {
  function prune (line 876) | function prune(just_checking, t, just_versions) {

FILE: antimatter_ts/src/json_crdt.ts
  function raw_read (line 55) | function raw_read(x, is_anc) {
  function generate_set_message (line 124) | function generate_set_message(version) {
  function recurse (line 210) | function recurse(x) {
  function resolve_path (line 436) | function resolve_path(parse) {
  function recurse (line 542) | function recurse(version) {
  function recurse (line 568) | function recurse(version) {

FILE: antimatter_ts/src/sequence_crdt.ts
  type Version (line 1) | type Version = string;
  type Node (line 3) | type Node = {
  function add_ins (line 48) | function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
  function add_del (line 66) | function add_del(offset, del, ins) {
  function helper (line 78) | function helper(node, _version, end_cap = undefined, is_row_header = und...
  function set_nnnext (line 147) | function set_nnnext(node, next) {
  function do_line (line 153) | function do_line(node, version) {
  function add_to_nexts (line 312) | function add_to_nexts(nexts: Node[], to: Node) {
  function traverse (line 447) | function traverse(node, prev, version) {
  function helper (line 495) | function helper(node, prev, version) {
  function binarySearch (line 515) | function binarySearch<T>(ar: T[], compare_fn: (x: T) => number): number {

FILE: antimatter_wiki/server.js
  function ensure_antimatter (line 22) | async function ensure_antimatter(key) {
  function respond_with_client (line 107) | function respond_with_client (req, res) {
  function ping (line 147) | function ping() {

FILE: braid-http/braid-http-client.js
  function braidify_http (line 7) | function braidify_http (http) {
  function braid_fetch (line 143) | async function braid_fetch (url, params = {}) {
  function handle_fetch_stream (line 315) | async function handle_fetch_stream (stream, cb) {
  method read (line 362) | read (input) {
  function parse_update (line 425) | function parse_update (state) {
  function parse_headers (line 451) | function parse_headers (input) {
  function parse_content_range (line 501) | function parse_content_range (range_string) {
  function parse_body (line 505) | function parse_body (state) {
  function extra_headers (line 646) | function extra_headers (headers) {
  function extractHeader (line 666) | function extractHeader(input) {

FILE: braid-http/braid-http-server.js
  function generate_patches (line 28) | function generate_patches(res, patches) {
  function parse_patches (line 67) | function parse_patches (req, cb) {
  function parse_update (line 79) | function parse_update (req, cb) {
  function parse_content_range (line 175) | function parse_content_range (range_string) {
  function braidify (line 182) | function braidify (req, res, next) {
  function send_update (line 284) | function send_update(res, data, url, peer) {
  function extractHeader (line 388) | function extractHeader(input) {

FILE: braid-http/demos/blog/server.js
  function getter (line 35) | function getter (req, res) {
  function log_request (line 110) | function log_request (req, res, next) {
  function free_the_cors (line 114) | function free_the_cors (req, res, next) {

FILE: braid-http/demos/chat/server.js
  function free_the_cors (line 80) | function free_the_cors (req, res, next) {

FILE: json-patch/apply-patch.js
  function apply_patch (line 1) | function apply_patch (obj, range, content) {

FILE: kernel/antimatter.js
  method set (line 3) | set (args) {
  method ack (line 60) | ack (args) {
  method fissure (line 87) | fissure ({key, fissure, origin}) {
  method disconnected (line 122) | disconnected ({key, name, versions, parents, time, origin}) {
  method welcome (line 193) | welcome (args) {
  function add_full_ack_leaf (line 417) | function add_full_ack_leaf(node, resource, version) {
  function check_ack_count (line 459) | function check_ack_count(node, key, resource, version) {
  function start_prune (line 519) | function start_prune (node, resource) {

FILE: kernel/demos/sync9-chat/chat-server.js
  function get_body (line 94) | async function get_body(req) {
  function serve_file (line 104) | async function serve_file(req, res) {
  function update_messages (line 214) | function update_messages(new_val) {
  function add_users (line 231) | function add_users(user_dict){
  function get_name (line 235) | function get_name(message){
  function save_token (line 246) | function save_token(token) {

FILE: kernel/demos/sync9-chat/chat.js
  function render_username (line 59) | function render_username(user_id) {
  function format_header (line 63) | function format_header (msg) {
  function format_message (line 77) | function format_message(msg, i, msgs, extra_classes) {
  function draw_typing_indicator (line 93) | function draw_typing_indicator(names) {
  function update_users (line 117) | function update_users (new_users) {
  function update_messages (line 126) | function update_messages(new_val) {
  function reset_text (line 164) | function reset_text(){
  function submit (line 177) | function submit() {
  function set_typing (line 197) | function set_typing(text) {
  function set_not_typing (line 203) | function set_not_typing () {
  function update_typing (line 211) | function update_typing() {
  function generate_username (line 255) | function generate_username () {
  function set_username (line 262) | function set_username (name) {
  function update_stats (line 280) | function update_stats () {

FILE: kernel/demos/sync9-chat/client.js
  function update_web_slider (line 5) | async function update_web_slider() {
  function send_notification (line 13) | async function send_notification() {
  function subscribe (line 29) | async function subscribe() {
  function get_subscription_string (line 43) | async function get_subscription_string () {
  function unsubscribe (line 60) | async function unsubscribe() {
  function url_base64_to_uint8_array (line 75) | function url_base64_to_uint8_array(base64_string) {
  function input_size (line 91) | function input_size () {
  function screen_size (line 120) | function screen_size () {

FILE: kernel/errors.js
  function report (line 1) | function report (method, error) {
  method get (line 9) | get (args) {
  method set (line 30) | set (args) {
  method welcome (line 57) | welcome (args) {
  method forget (line 116) | forget (args) {
  method ack (line 123) | ack (args) {
  method fissure (line 144) | fissure ({key, fissure, origin}) {

FILE: kernel/http-client.js
  function send (line 29) | function send(args) {
  function sets_from_stream (line 44) | function sets_from_stream(stream, callback, finished) {
  function send_get (line 171) | function send_get (msg) {
  function send_set (line 220) | function send_set (msg) {
  method enabled (line 266) | enabled() {return enabled}
  method enable (line 267) | enable()  {nlog('ENABLING PIPE', pipe.id); enabled = true; }
  method disable (line 268) | disable() {nlog('DISABLING PIPE',pipe.id); enabled = false; controller.a...
  method toggle (line 269) | toggle()  {if (enabled) {disable()} else enable()}

FILE: kernel/http-server.js
  function writePatches (line 14) | function writePatches(patches) {
  function readPatches (line 43) | function readPatches(n, stream, cb) {
  function responsePipe (line 86) | function responsePipe(res, id) {
  function handleHttpResponse (line 140) | function handleHttpResponse(req, res) {
  function cors (line 253) | function cors(req, res) {

FILE: kernel/leadertab-shell.js
  method upgrade (line 64) | upgrade(db) {
  function startElection (line 125) | async function startElection(local_eligible) {
  function resign (line 142) | function resign() {
  function becomeLeader (line 161) | async function becomeLeader() {
  function subscribe (line 233) | function subscribe(key) {
  function handleCommand (line 250) | function handleCommand(command) {
  function send (line 302) | function send(message) {
  function recvState (line 313) | function recvState(message) {
  function pingLeader (line 320) | function pingLeader(time) {

FILE: kernel/llww.js
  method add_version (line 5) | add_version (version, parents, patches) {
  method read (line 9) | read (version) {
  method generate_braid (line 14) | generate_braid (versions) {
  function apply_patch (line 31) | function apply_patch (patch, resource) {

FILE: kernel/node.js
  method send (line 54) | send(args) {
  function remove_ancestors (line 333) | function remove_ancestors (v) {
  function send_error (line 357) | function send_error() {
  function default_val_for (line 511) | function default_val_for (key) {
  function create_resource (line 525) | function create_resource(resource = {}) {
  function pattern_matcher (line 608) | function pattern_matcher () {

FILE: kernel/pipe.js
  function on_pong (line 22) | function on_pong() {
  method send (line 45) | send (args) {
  method recv (line 108) | recv (args) {
  method connected (line 175) | connected () {
  method disconnected (line 225) | disconnected () {
  method printy_stuff (line 251) | printy_stuff (key) {

FILE: kernel/sqlite-store.js
  method get (line 19) | get(key) {
  method set (line 23) | set(key, data) {
  method del (line 26) | del(key) {
  method list_keys (line 29) | list_keys() {

FILE: kernel/store.js
  function fastforward (line 52) | function fastforward() {
  function add (line 79) | function add(key, x) {
  function compress (line 94) | function compress(key) {
  function find_open_index (line 110) | function find_open_index(ab, key, intermediate) {

FILE: kernel/test/tests.js
  method add_frame (line 41) | add_frame() {}
  function add_peer (line 45) | function add_peer (node, peer_number) {
  function make_alphabet (line 50) | function make_alphabet (node, peer_number) {
  function save_node_copy (line 65) | function save_node_copy(node) {
  function step (line 79) | function step(frame_num) {
  function create_random_edit (line 126) | function create_random_edit(resource, letters) {
  function setup_test (line 145) | function setup_test () {
  function evaluate_trial (line 194) | function evaluate_trial (trial_num) {
  function run_trials (line 242) | function run_trials () {
  function run_trial (line 251) | function run_trial (trial_num) {
  function next_trial (line 272) | function next_trial () {
  function run_step (line 287) | function run_step () {

FILE: kernel/test/virtual-p2p.js
  method setup (line 7) | setup () {
  method wrapup (line 64) | wrapup (cb) {
  method receive_message (line 120) | receive_message (peer) {
  method toggle_pipe (line 135) | toggle_pipe () {

FILE: kernel/test/websocket-test.js
  method setup (line 58) | setup () {
  method wrapup (line 89) | wrapup (cb) {
  method die (line 118) | die (cb) {
  method toggle_pipe (line 127) | toggle_pipe () {

FILE: kernel/test/wiki-tester.js
  method on (line 20) | on(event_type, func) {
  method close (line 25) | close() {
  method send (line 47) | send(msg) {
  method terminate (line 69) | terminate() {
  method on (line 94) | on(event_type, func) {
  method send (line 98) | send(msg) {
  method begin (line 136) | begin(key) {
  method end (line 141) | end(key) {
  method mark (line 147) | mark(key) {
  method print (line 151) | print() {
  function main (line 162) | async function main() {
  function run_experiment (line 230) | async function run_experiment(rand_seed) {
  function run_experiment_from_actions (line 350) | async function run_experiment_from_actions(actions) {
  function create_db (line 526) | function create_db() {
  function create_server (line 536) | async function create_server(db) {
  function create_client (line 563) | function create_client() {

FILE: kernel/websocket-client.js
  method enabled (line 96) | enabled() {return enabled}
  method enable (line 97) | enable()  {nlog('ENABLING PIPE', pipe.id);enabled = true; connect()}
  method disable (line 98) | disable() {nlog('DISABLING PIPE',pipe.id);enabled = false; disconnect()}
  method toggle (line 99) | toggle()  {if (enabled) {disable()} else enable()}

FILE: kernel/websocket-server.js
  function connect (line 28) | function connect () {
  function disconnect (line 33) | function disconnect () {
  function send (line 36) | function send (msg) {

FILE: simple_d_ton/index.js
  function simple_d_ton (line 10) | async function simple_d_ton(req, res, options = {}) {
  function get_resource (line 438) | async function get_resource(key, db_folder) {
  function file_sync (line 470) | async function file_sync(db_folder, filename_base, process_delta, get_in...
  function defrag_dt (line 597) | function defrag_dt(doc) {
  function OpLog_get_patches (line 603) | function OpLog_get_patches(bytes, op_runs) {
  function parseDT (line 653) | function parseDT(byte_array) {
  function OpLog_create_bytes (line 748) | function OpLog_create_bytes(version, parents, pos, ins) {
  function OpLog_remote_to_local (line 895) | function OpLog_remote_to_local(doc, frontier) {
  function encode_version (line 909) | function encode_version(agent, seq) {
  function decode_version (line 913) | function decode_version(v) {
  function v_eq (line 919) | function v_eq(v1, v2) {
  function get_xf_patches (line 923) | function get_xf_patches(doc, v) {
  function relative_to_absolute_patches (line 943) | function relative_to_absolute_patches(patches) {
  function create_avl_tree (line 1071) | function create_avl_tree(on_rotate) {
  function count_code_points (line 1184) | function count_code_points(str) {
  function index_to_codePoints (line 1193) | function index_to_codePoints(str, index) {
  function codePoints_to_index (line 1204) | function codePoints_to_index(str, codePoints) {

FILE: simpleton/client.js
  function simpleton_client (line 20) | function simpleton_client(url, { apply_remote_update, generate_local_dif...
  function get_char_size (line 123) | function get_char_size(s, i) {
  function count_code_points (line 128) | function count_code_points(str) {
  function braid_fetch_wrapper (line 137) | async function braid_fetch_wrapper(url, params) {

FILE: simpleton/server.js
  function handle (line 11) | async function handle(req, res, options = {}) {
  function get_resource (line 295) | async function get_resource(key, db_folder) {
  function file_sync (line 327) | async function file_sync(db_folder, filename_base, process_delta, get_in...
  function defrag_dt (line 454) | function defrag_dt(doc) {
  function parseDT (line 460) | function parseDT(byte_array) {
  function OpLog_create_bytes (line 556) | function OpLog_create_bytes(version, parents, pos, ins) {
  function OpLog_remote_to_local (line 705) | function OpLog_remote_to_local(doc, frontier) {
  function encode_version (line 722) | function encode_version(agent, seq) {
  function decode_version (line 726) | function decode_version(v) {
  function v_eq (line 732) | function v_eq(v1, v2) {
  function get_xf_patches (line 736) | function get_xf_patches(doc, v) {
  function relative_to_absolute_patches (line 756) | function relative_to_absolute_patches(patches) {
  function create_avl_tree (line 896) | function create_avl_tree(on_rotate) {

FILE: sync9/old-vis/visualization.js
  function loop (line 45) | function loop() {
  function draw_frame (line 70) | function draw_frame(di, percent) {
  function draw_text (line 113) | function draw_text(c, g, text, x, y, color, x_align, y_align, font) {
  function draw_network (line 121) | function draw_network(c, g, frames, fi, percent, x, y, w, h, r) {
  function draw_fissure_dag (line 328) | function draw_fissure_dag(c, g, frames, fi, pi, x, y, w, h, r) {
  function draw_time_dag (line 432) | function draw_time_dag(c, g, frames, fi, pi, x, y, w, h, r) {
  function draw_space_dag (line 604) | function draw_space_dag(c, g, S, x, y) {
  function lerp (line 634) | function lerp(t0, v0, t1, v1, t) {
  function rot (line 643) | function rot(a, r) {

FILE: sync9/sync9.js
  method read (line 6) | read (version) {
  method add_version (line 10) | add_version (version, parents, patches, hint) {
  method generate_braid (line 15) | generate_braid (versions) {
  method prune (line 35) | prune (bubbles) {
  function generate_braid (line 41) | function generate_braid(resource, is_anc) {
  function space_dag_generate_braid (line 120) | function space_dag_generate_braid(S, resource, version, is_anc) {
  function prune (line 175) | function prune(resource, to_bubble) {
  function space_dag_prune (line 275) | function space_dag_prune(S, to_bubble, seen_annotations) {
  function add_version (line 348) | function add_version(resource, version, parents, patches, sort_keys, is_...
  function read (line 471) | function read(x, is_anc) {
  function read_raw (line 495) | function read_raw(x, is_anc, annotations) {
  function create_space_dag_node (line 557) | function create_space_dag_node(version, elems, end_cap, sort_key) {
  function space_dag_get (line 569) | function space_dag_get(S, i, is_anc) {
  function space_dag_set (line 582) | function space_dag_set(S, i, v, is_anc) {
  function space_dag_length (line 593) | function space_dag_length(S, is_anc) {
  function space_dag_break_node (line 601) | function space_dag_break_node(node, x, end_cap, new_next) {
  function space_dag_add_version (line 627) | function space_dag_add_version(S, version, splices, sort_key, is_anc) {
  function traverse_space_dag (line 731) | function traverse_space_dag(S, f, cb, view_deleted, tail_cb) {
  function binarySearch (line 757) | function binarySearch(ar, compare_fn) {

FILE: util/apply-patch.js
  function apply_patch (line 1) | function apply_patch (obj, range, content) {

FILE: util/diff.js
  function diff_convert_to_my_format (line 2) | function diff_convert_to_my_format(d, factor) {
  function diff_main (line 80) | function diff_main(text1, text2, cursor_pos) {
  function diff_compute_ (line 131) | function diff_compute_(text1, text2) {
  function diff_bisect_ (line 194) | function diff_bisect_(text1, text2) {
  function diff_bisectSplit_ (line 310) | function diff_bisectSplit_(text1, text2, x, y) {
  function diff_commonPrefix (line 331) | function diff_commonPrefix(text1, text2) {
  function diff_commonSuffix (line 362) | function diff_commonSuffix(text1, text2) {
  function diff_halfMatch_ (line 398) | function diff_halfMatch_(text1, text2) {
  function diff_cleanupMerge (line 486) | function diff_cleanupMerge(diffs) {
  function cursor_normalize_diff (line 620) | function cursor_normalize_diff (diffs, cursor_pos) {
  function fix_cursor (line 665) | function fix_cursor (diffs, cursor_pos) {
  function merge_tuples (line 714) | function merge_tuples (diffs, start, length) {

FILE: util/require.js
  function require (line 2) | function require (thing) {

FILE: util/utilities.js
  method get (line 38) | get (k) { return Object.values(data[k] || dict()) }
  method add (line 39) | add (k1, k2, value) {
  method delete (line 46) | delete (k, k2) { delete data[k][k2]; counts[k]-- }
  method delete_all (line 47) | delete_all (k) { delete data[k]; delete counts[k] }
  method has (line 48) | has (k, k2)    { return data[k] && k2 in data[k] }
  method count (line 49) | count (k)      { return counts[k] || 0}
  method toString (line 50) | toString ()    { return JSON.stringify({data, counts}, null, '    ') }
  function deep_equals (line 90) | function deep_equals(a, b) {
  function parse_patch (line 107) | function parse_patch(patch) {
  function func (line 177) | function func() {

FILE: yarnball/server.js
  function wal_compactor (line 43) | async function wal_compactor() {
  function create_loom_server (line 85) | function create_loom_server(L) {
  function create_loom (line 125) | function create_loom(L, send) {
  function create_space_dag_node (line 809) | function create_space_dag_node(version, elems, end_cap, sort_key) {
  function space_dag_apply_bubbles (line 821) | function space_dag_apply_bubbles(S, to_bubble) {
  function space_dag_get (line 881) | function space_dag_get(S, i, is_anc) {
  function space_dag_set (line 894) | function space_dag_set(S, i, v, is_anc) {
  function space_dag_length (line 905) | function space_dag_length(S, is_anc) {
  function space_dag_break_node (line 913) | function space_dag_break_node(node, x, end_cap, new_next) {
  function space_dag_add_version (line 927) | function space_dag_add_version(S, version, splices, sort_key, is_anc) {
  function traverse_space_dag (line 1030) | function traverse_space_dag(S, f, cb, view_deleted, tail_cb) {
  function parse_patch (line 1053) | function parse_patch(patch) {
  function binarySearch (line 1068) | function binarySearch(ar, compare_fn) {

FILE: yarnball/yarnball.js
  function create_yarnball_client (line 4) | function create_yarnball_client(base_url) {
  function create_loom_client (line 48) | function create_loom_client(L, url, on_change) {
  function create_loom (line 91) | function create_loom(L, send) {
  function create_space_dag_node (line 718) | function create_space_dag_node(version, elems, end_cap, sort_key) {
  function space_dag_generate_braid (line 730) | function space_dag_generate_braid(S, version, is_anc) {
  function space_dag_apply_bubbles (line 783) | function space_dag_apply_bubbles(S, to_bubble) {
  function space_dag_get (line 843) | function space_dag_get(S, i, is_anc) {
  function space_dag_set (line 856) | function space_dag_set(S, i, v, is_anc) {
  function space_dag_length (line 867) | function space_dag_length(S, is_anc) {
  function space_dag_break_node (line 875) | function space_dag_break_node(node, x, end_cap, new_next) {
  function space_dag_add_version (line 889) | function space_dag_add_version(S, version, splices, sort_key, is_anc) {
  function traverse_space_dag (line 1008) | function traverse_space_dag(S, f, cb, view_deleted, tail_cb) {
  function parse_patch (line 1031) | function parse_patch(patch) {
  function binarySearch (line 1046) | function binarySearch(ar, compare_fn) {
Condensed preview — 102 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (953K chars).
[
  {
    "path": ".gitignore",
    "chars": 2212,
    "preview": "# Any certificates\ncerts/\ncertificates/\n*.pem\n\n# Database stuff\ndb.sqlite*\n\n# Builds\nbraid-bundle.js\nbuilds/\n\n# VS Code\n"
  },
  {
    "path": "antimatter/antimatter.js",
    "chars": 76085,
    "preview": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// var {create_antimatte"
  },
  {
    "path": "antimatter/doc.html",
    "chars": 2321,
    "preview": "<head>\n<link rel=\"stylesheet\" href=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css\">\n</head>\n\n<"
  },
  {
    "path": "antimatter/package.json",
    "chars": 334,
    "preview": "{\n  \"name\": \"@braidjs/antimatter\",\n  \"version\": \"0.0.34\",\n  \"description\": \"antimatter: a pruning algorithm for CRDTs an"
  },
  {
    "path": "antimatter/readme.md",
    "chars": 2294,
    "preview": "# MOVED TO https://github.com/braid-org/antimatter\n\n--\n\n# antimatter: an algorithm that prunes CRDT/OT history\n\n[Antimat"
  },
  {
    "path": "antimatter/test.html",
    "chars": 9669,
    "preview": "<body></body>\n<script>\n\nlet real_random = Math.random\n\nfunction print(...args) {\n    let d = document.createElement('div"
  },
  {
    "path": "antimatter_ts/antimatter.js",
    "chars": 75600,
    "preview": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// var {create_antimatte"
  },
  {
    "path": "antimatter_ts/doc.html",
    "chars": 2320,
    "preview": "<head>\n<link rel=\"stylesheet\" href=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css\">\n</head>\n\n<"
  },
  {
    "path": "antimatter_ts/package.json",
    "chars": 492,
    "preview": "{\n  \"name\": \"@braidjs/antimatter\",\n  \"version\": \"0.0.23\",\n  \"description\": \"antimatter: a pruning algorithm for CRDTs an"
  },
  {
    "path": "antimatter_ts/random002.js",
    "chars": 8256,
    "preview": "\n// the next two functions added by me\n\nfunction create_rand(seed) {\n  if (typeof(seed) == 'string') {\n    var t = new M"
  },
  {
    "path": "antimatter_ts/readme.md",
    "chars": 2238,
    "preview": "# antimatter: an algorithm that prunes CRDT/OT history\n\n[Antimatter](https://braid.org/antimatter) is the world's first "
  },
  {
    "path": "antimatter_ts/src/antimatter_crdt.ts",
    "chars": 33347,
    "preview": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// let {create_antimatte"
  },
  {
    "path": "antimatter_ts/src/json_crdt.ts",
    "chars": 22062,
    "preview": "/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable\n///   JSON CRDT — \"JSON\" meaning it repr"
  },
  {
    "path": "antimatter_ts/src/sequence_crdt.ts",
    "chars": 18622,
    "preview": "type Version = string;\n\ntype Node = {\n  /// globally unique string\n  version: Version,\n  /// a string or array represent"
  },
  {
    "path": "antimatter_ts/test.html",
    "chars": 9485,
    "preview": "<body></body>\n<script>\n\nlet real_random = Math.random\n\nfunction print(...args) {\n    let d = document.createElement('div"
  },
  {
    "path": "antimatter_ts/tsconfig.json",
    "chars": 61,
    "preview": "{\n    \"compilerOptions\": {\n        \"lib\": [\"ES2017\"],\n    }\n}"
  },
  {
    "path": "antimatter_wiki/client.html",
    "chars": 14926,
    "preview": "<body>\n    <script src=\"https://unpkg.com/@braidjs/antimatter@0.0.20\"></script>\n    <script src=\"https://invisible-colle"
  },
  {
    "path": "antimatter_wiki/package.json",
    "chars": 358,
    "preview": "{\n  \"name\": \"@braidjs/antimatter_wiki\",\n  \"version\": \"0.1.5\",\n  \"description\": \"collaborative wiki using antimatter sync"
  },
  {
    "path": "antimatter_wiki/readme.md",
    "chars": 527,
    "preview": "# MOVED TO https://github.com/braid-org/antimatter_wiki\n\n# Antimatter Wiki\n\nA collaborative wiki based on the [Antimatte"
  },
  {
    "path": "antimatter_wiki/server.js",
    "chars": 5905,
    "preview": "\nconsole.log(require('./package.json').version)\n\nvar fs = require('fs')\nvar fs_p = require('fs/promises')\n\nvar {antimatt"
  },
  {
    "path": "braid-http/braid-http-client.js",
    "chars": 24221,
    "preview": "// var peer = Math.random().toString(36).substr(2)\n\n// ***************************\n// http\n// **************************"
  },
  {
    "path": "braid-http/braid-http-server.js",
    "chars": 15664,
    "preview": "var assert = require('assert')\n\n// Return a string of patches in pseudoheader format.\n//\n//   The `patches` argument can"
  },
  {
    "path": "braid-http/contributing.md",
    "chars": 383,
    "preview": "# Contributing to Braid-HTTP\n\nThis is core code, and I'd like it to meet everyone's needs!  I welcome\nsuggestions and im"
  },
  {
    "path": "braid-http/demos/blog/README",
    "chars": 325,
    "preview": "This is a demo blog / chat.\n\nTo run the demo:\n\n    git clone https://github.com/braid-work/braidjs.git\n    cd braidjs/de"
  },
  {
    "path": "braid-http/demos/blog/certificate",
    "chars": 1228,
    "preview": "-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJALgm2/aRZmh6MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAkFVMRMwEQYDVQQIDApTb21"
  },
  {
    "path": "braid-http/demos/blog/client.html",
    "chars": 5639,
    "preview": "<script type=coffee>\n# ##########################\n# UI written in coffeescript\n# \n\nfontFamily = 'avenext, avenir, sans'\n"
  },
  {
    "path": "braid-http/demos/blog/package.json",
    "chars": 90,
    "preview": "{\n  \"dependencies\": {\n    \"express\": \"^4.19.2\",\n    \"http2-express-bridge\": \"^1.0.7\"\n  }\n}"
  },
  {
    "path": "braid-http/demos/blog/private-key",
    "chars": 1675,
    "preview": "-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8Ze\nFfIXpKI+JRLObDB7Me/8y4b"
  },
  {
    "path": "braid-http/demos/blog/server.js",
    "chars": 3989,
    "preview": "assert = require('assert')\n\n// Blog Data\nvar resources = {\n    '/blog': [\n        {link: '/post/1'},\n        {link: '/po"
  },
  {
    "path": "braid-http/demos/chat/README",
    "chars": 567,
    "preview": "To run the braidjs chat demo:\n\n    git clone https://github.com/braid-work/braidjs.git\n    cd braidjs/demos/chat\n    npm"
  },
  {
    "path": "braid-http/demos/chat/certificate",
    "chars": 1228,
    "preview": "-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJALgm2/aRZmh6MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAkFVMRMwEQYDVQQIDApTb21"
  },
  {
    "path": "braid-http/demos/chat/client.html",
    "chars": 3877,
    "preview": "<script src=\"braid-http-client.js\"></script>\n<script type=\"module\">\n  // Imports\n  import { h, Component, render } from "
  },
  {
    "path": "braid-http/demos/chat/package.json",
    "chars": 90,
    "preview": "{\n  \"dependencies\": {\n    \"express\": \"^4.19.2\",\n    \"http2-express-bridge\": \"^1.0.7\"\n  }\n}"
  },
  {
    "path": "braid-http/demos/chat/private-key",
    "chars": 1675,
    "preview": "-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8Ze\nFfIXpKI+JRLObDB7Me/8y4b"
  },
  {
    "path": "braid-http/demos/chat/server.js",
    "chars": 3776,
    "preview": "var assert = require('assert')\n\n// Chat Data\nvar resources = {\n    '/chat': [\n        {text: 'Hello!'},\n        {text: '"
  },
  {
    "path": "braid-http/index.js",
    "chars": 304,
    "preview": "// This is the root file for require('braid-http').\n//\n// It combines the client and server files into one file.\n\nvar cl"
  },
  {
    "path": "braid-http/index.mjs",
    "chars": 454,
    "preview": "// This is the root file for es modules:\n//\n//    import {fetch, http} from 'braid-http'\n//\n// This file combines the cl"
  },
  {
    "path": "braid-http/package.json",
    "chars": 752,
    "preview": "{\n  \"name\": \"braid-http\",\n  \"version\": \"0.3.21\",\n  \"description\": \"An implementation of Braid-HTTP for Node.js and Brows"
  },
  {
    "path": "braid-http/package.md",
    "chars": 2212,
    "preview": "# package.json notes\n\nThis package is bundled as both a commonjs and es6-compatible NPM bundle. The\nfactor that enables "
  },
  {
    "path": "braid-http/readme.md",
    "chars": 7460,
    "preview": "# NOTE: This project has moved to [braid-http](https://github.com/braid-org/braid-http)\n\n# Braid-HTTP\n\nThis polyfill lib"
  },
  {
    "path": "braid-http/test/client.html",
    "chars": 4081,
    "preview": "<script src=\"/braid-http-client.js\"></script>\n<script type=module>\nvar fetch = braid_fetch\n\n// Setup the tests\nwindow.te"
  },
  {
    "path": "braid-http/test/readme.md",
    "chars": 3347,
    "preview": "# To Test Braid-HTTP\n\nRun the server with:\n\n```\nnode server.js\n```\n\n### Test the server alone\n\nRun this at your command-"
  },
  {
    "path": "braid-http/test/server.js",
    "chars": 2979,
    "preview": "var braidify = require('../braid-http-server.js')\nvar sendfile = (f, req, res) => res.end(require('fs').readFileSync(req"
  },
  {
    "path": "braid-http/test/test-request.txt",
    "chars": 579,
    "preview": "GET /json HTTP/1.1\r\nHost: localhost:9000\r\nConnection: keep-alive\r\nCache-Control: max-age=0\r\nsec-ch-ua: \"Not/A)Brand\";v=\""
  },
  {
    "path": "braid-http/test/test-responses.txt",
    "chars": 1299,
    "preview": "Read 1 {\"version\":[\"test\"],\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 1 {\"version\":[\"test1\"],\"parents\":[\"o"
  },
  {
    "path": "json-patch/apply-patch.js",
    "chars": 3276,
    "preview": "function apply_patch (obj, range, content) {\n\n    // Descend down a bunch of objects until we get to the final object\n  "
  },
  {
    "path": "json-patch/package.json",
    "chars": 202,
    "preview": "{\n  \"name\": \"@braid.org/json-patch\",\n  \"version\": \"1.0.6\",\n  \"description\": \"Patch JSON\",\n  \"main\": \"apply-patch.js\",\n  "
  },
  {
    "path": "json-patch/readme.md",
    "chars": 549,
    "preview": "# JSON Patch\n\nThis library patches JSON objects using the Braid range-patch format.\n\nUsing it:\n```javascript\nvar patch ="
  },
  {
    "path": "json-patch/test.js",
    "chars": 1415,
    "preview": "var assert = require('assert')\n\nvar patch = require('.')\nvar json = {a: \"foo\", b: [1,2,3]}\n\n// Replace 2 with 99\npatch(j"
  },
  {
    "path": "kernel/antimatter.js",
    "chars": 30070,
    "preview": "module.exports = require.antimatter = (node) => ({\n\n    set (args) {\n        var {key, patches, version, parents, origin"
  },
  {
    "path": "kernel/demos/simple/simple-client.html",
    "chars": 930,
    "preview": "<script src=\"../../../builds/braid-bundle.js\"></script>\n<body>\n  <div id=\"out\"></div>\n  <textarea id=\"in\"></textarea>\n  "
  },
  {
    "path": "kernel/demos/simple/simple-server.js",
    "chars": 3090,
    "preview": "var certificate = `-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJANoWGfl3pEeHMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAlVT"
  },
  {
    "path": "kernel/demos/sync9-chat/chat-server.js",
    "chars": 9068,
    "preview": "var fs = require('fs')\nvar path = require('path')\nvar ws = require('ws')\nrequire('dotenv').config()\n\n// When we have the"
  },
  {
    "path": "kernel/demos/sync9-chat/chat.css",
    "chars": 3893,
    "preview": "@import url('https://fonts.googleapis.com/css2?family=Recursive:wght@300;400&display=swap');\n\nbody {\n    padding: 0;\n   "
  },
  {
    "path": "kernel/demos/sync9-chat/chat.html",
    "chars": 1956,
    "preview": "<!doctype html>\n<html>\n<head>\n   <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n   <meta charset=\"u"
  },
  {
    "path": "kernel/demos/sync9-chat/chat.js",
    "chars": 11312,
    "preview": "// Create a node\nvar browser_id = localStorage.browser_id || localStorage.browserId || 'B-' + Math.random().toString(36)"
  },
  {
    "path": "kernel/demos/sync9-chat/client.js",
    "chars": 4101,
    "preview": "var public_vapid_key =\n  \"BB2ikt9eLJydNI-1LpnaRYiogis3ydcUEw6O615fhaHsOsRRHcMZUfVSTNqun6HVb44M6PdfviDJkMWsdTO7XcM\"\n\n\nasy"
  },
  {
    "path": "kernel/demos/sync9-chat/mobile.css",
    "chars": 2698,
    "preview": "#send-msg img{   \n    width:65px;\n    height:65px;\n    \n}\n#send-msg {\n    position:absolute;\n    right:13%;\n    backgrou"
  },
  {
    "path": "kernel/demos/sync9-chat/package.json",
    "chars": 386,
    "preview": "{\n  \"name\": \"sync9-chat\",\n  \"version\": \"0.0.1\",\n  \"description\": \"\",\n  \"author\": \"Braid Working Group\",\n  \"repository\": "
  },
  {
    "path": "kernel/demos/sync9-chat/settings.css",
    "chars": 1014,
    "preview": "body {\n  font-family: sans-serif;\n  padding:10%;\n  text-align:center;\n}\n\n#home-icon {\n  display:inline;\n  width:30px;\n  "
  },
  {
    "path": "kernel/demos/sync9-chat/settings.html",
    "chars": 910,
    "preview": "<!DOCTYPE html>\n<html lang=\"en\">\n\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, i"
  },
  {
    "path": "kernel/demos/sync9-chat/worker.js",
    "chars": 258,
    "preview": "console.log(\"Service Worker Loaded...\");\n\nself.addEventListener(\"push\", e => {\n  const data = e.data.json();\n  console.l"
  },
  {
    "path": "kernel/demos/wiki/wiki-client.html",
    "chars": 22687,
    "preview": "<script src=\"/braid-bundle.js\"></script>\n<script src=\"https://invisible.college/js/marked.min.js\"></script>\n\n<link rel=\""
  },
  {
    "path": "kernel/demos/wiki/wiki-server.js",
    "chars": 1130,
    "preview": "const port = 3007;\n\nrequire('../../../util/braid-bundler.js')\nvar fs = require('fs')\nvar bundle = fs.readFileSync('../.."
  },
  {
    "path": "kernel/errors.js",
    "chars": 7290,
    "preview": "function report (method, error) {\n    if (show_protocol_errors)\n        console.log('PROTOCOL ERROR for ' + method + ': "
  },
  {
    "path": "kernel/http-client.js",
    "chars": 12104,
    "preview": "// This file is still being used with the sync9-chat demo, but Mike will\n// refactor it soon.\n\nvar u = require('utilitie"
  },
  {
    "path": "kernel/http-server.js",
    "chars": 12469,
    "preview": "// This file is still being used with the sync9-chat demo, but Mike will\n// refactor it soon.\n\n// Example braid-peer as "
  },
  {
    "path": "kernel/leadertab-shell.js",
    "chars": 14692,
    "preview": "var util = require('utilities.js');\nvar store = require('store.js');\n\nconst states = {\n    // Don't process incoming com"
  },
  {
    "path": "kernel/llww.js",
    "chars": 1322,
    "preview": "module.exports = require.llww = (resource) => {\n    resource.value = undefined\n\n    return {\n        add_version (versio"
  },
  {
    "path": "kernel/node.js",
    "chars": 27786,
    "preview": "u = require('../util/utilities.js')\n\nmodule.exports = require.node = function create_node(node_data = {}) {\n    var node"
  },
  {
    "path": "kernel/package.json",
    "chars": 506,
    "preview": "{\n  \"name\": \"braid-bus\",\n  \"version\": \"0.0.1\",\n  \"description\": \"\",\n  \"scripts\": {\n    \"test\": \"node test/tests.js\",\n   "
  },
  {
    "path": "kernel/pipe.js",
    "chars": 9722,
    "preview": "// A pipe is a network connection that can get disconnected and reconnected.\n//\n// A pipe can send and receive.  The use"
  },
  {
    "path": "kernel/readme.md",
    "chars": 1918,
    "preview": "# A prototype Braid Kernel\n\nAn abstraction for distributed state.\n\n## Status\n\nWe've built some cool algorithms in here, "
  },
  {
    "path": "kernel/sqlite-store.js",
    "chars": 1175,
    "preview": "\n// options = {\n//     table_name: 'store' // <-- default, a table of this name will be created in sqlite\n// }\n// option"
  },
  {
    "path": "kernel/store.js",
    "chars": 4381,
    "preview": "\n// options = {\n//     compress_if_inactive_time: 4000 // <-- default, means it will compress 4 seconds after the last e"
  },
  {
    "path": "kernel/test/tests.js",
    "chars": 9856,
    "preview": "require('../../sync9/sync9.js')\nrequire('../../util/utilities.js')\n\n//show_debug = true\n\nvar n_peers = 3\nvar n_steps_per"
  },
  {
    "path": "kernel/test/virtual-p2p.js",
    "chars": 6261,
    "preview": "// Tests using a virtual network\n\nmodule.exports = require['virtual-p2p'] = (sim) => (\n    {\n        name: 'virtual',\n  "
  },
  {
    "path": "kernel/test/websocket-test.js",
    "chars": 5807,
    "preview": "// Tests for the braid-websocket protocol\n\nmodule.exports = require['websocket-test'] = (sim) => (\n    {\n        name: '"
  },
  {
    "path": "kernel/test/wiki-perf.html",
    "chars": 34544,
    "preview": "<!DOCTYPE html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n<meta charset=\"utf-8\">"
  },
  {
    "path": "kernel/test/wiki-tester.js",
    "chars": 25353,
    "preview": "\nrequire('../../util/utilities.js')\n\nvar page_key = '/foo'\ng_current_server = null\n\ng_debug_WS_messages = []\ng_debug_WS_"
  },
  {
    "path": "kernel/websocket-client.js",
    "chars": 3072,
    "preview": "// Example braid-peer as a web browser client\n\nmodule.exports = require['websocket-client'] = function add_websocket_cli"
  },
  {
    "path": "kernel/websocket-server.js",
    "chars": 1752,
    "preview": "// Example braid-peer as a web server\n// options = {\n//     port: // default is 3007\n//     wss: // default is null, wil"
  },
  {
    "path": "readme.md",
    "chars": 1540,
    "preview": "# The Braidjs Monorepo\n\nBy versioning our code together, it becomes easier to interoperate.\n\n  - Each top-level folder i"
  },
  {
    "path": "simple_d_ton/index.js",
    "chars": 42507,
    "preview": "console.log(\"v13\")\n\nlet { Doc, Branch, OpLog } = require(\"diamond-types-node\")\nlet braidify = require(\"braid-http\").http"
  },
  {
    "path": "simple_d_ton/package.json",
    "chars": 336,
    "preview": "{\n  \"name\": \"simple_d_ton\",\n  \"version\": \"0.0.24\",\n  \"description\": \"Serve diamond-types and simpleton requests.\",\n  \"au"
  },
  {
    "path": "simpleton/client.js",
    "chars": 5751,
    "preview": "// requires braid-http@0.3.14\n// \n// url: simpleton resource endpoint\n//\n// apply_remote_update: ({patches, state}) => {"
  },
  {
    "path": "simpleton/demo.js",
    "chars": 1705,
    "preview": "console.log(\"v9\")\n\nprocess.on(\"uncaughtException\", (e) => console.log(e.stack))\nprocess.on(\"unhandledRejection\", (e) => "
  },
  {
    "path": "simpleton/index.js",
    "chars": 134,
    "preview": "module.exports = {\n    create_simpleton_client: require('./client').create_simpleton_client,\n    handle: require('./serv"
  },
  {
    "path": "simpleton/index.mjs",
    "chars": 250,
    "preview": "import client from './client.js'\nimport server from './server.js'\n\nvar create_simpleton_client = client.create_simpleton"
  },
  {
    "path": "simpleton/package.json",
    "chars": 523,
    "preview": "{\n  \"name\": \"simpleton_braid\",\n  \"version\": \"0.2.2\",\n  \"description\": \"An implementation of the simpleton protocol for N"
  },
  {
    "path": "simpleton/server.js",
    "chars": 33663,
    "preview": "\nconsole.log(\"simpleton.js: v163\")\n\nlet { Doc, Branch, OpLog } = require(\"diamond-types-node\")\nlet braidify = require(\"b"
  },
  {
    "path": "sync9/old-vis/visualization.html",
    "chars": 506,
    "preview": "<script src=\"../../util/require.js\"></script>\n<script src=\"../../util/utilities.js\"></script>\n<script src=\"../../sync9/s"
  },
  {
    "path": "sync9/old-vis/visualization.js",
    "chars": 25105,
    "preview": "module.exports = require.visualization = function create_vis(sim) {\n    var tau = Math.PI*2\n    var debug_frames = []\n  "
  },
  {
    "path": "sync9/sync9.js",
    "chars": 28342,
    "preview": "// Adapted from https://github.com/dglittle/cdn/blob/gh-pages/sync9_047.html\n\nmodule.exports = require.sync9 = function "
  },
  {
    "path": "util/apply-patch.js",
    "chars": 3956,
    "preview": "function apply_patch (obj, range, content) {\n\n    // Descend down a bunch of objects until we get to the final object\n  "
  },
  {
    "path": "util/braid-bundler.js",
    "chars": 787,
    "preview": "// Bundles up the client javascript file.\nvar files = [\n    'util/require.js',\n    'util/utilities.js',\n    'sync9/sync9"
  },
  {
    "path": "util/diff.js",
    "chars": 25189,
    "preview": "\nfunction diff_convert_to_my_format(d, factor) {\n    if (factor === undefined) factor = 1\n    var x = []\n    var ii = 0\n"
  },
  {
    "path": "util/require.js",
    "chars": 400,
    "preview": "// These 8 lines let browsers import modules with require().\nfunction require (thing) {\n    thing = thing.split('/')\n   "
  },
  {
    "path": "util/utilities.js",
    "chars": 15145,
    "preview": "// ===============================================\n//\n//   Utilities\n//\n\nis_browser = typeof process !== 'object' || typ"
  },
  {
    "path": "yarnball/server.js",
    "chars": 38827,
    "preview": "\n// require('child_process').execSync('cp ./log-old.txt ./log.txt', {stdio: 'inherit'})\n// require('child_process').exec"
  },
  {
    "path": "yarnball/yarnball.html",
    "chars": 397,
    "preview": "\n<script src=\"https://bloop.monster/yarnball.js\"></script>\n<body></body>\n<script>\n\nvar yb = create_yarnball_client('wss:"
  },
  {
    "path": "yarnball/yarnball.js",
    "chars": 38023,
    "preview": "\nconsole.log('yarnball 0.001')\n\nfunction create_yarnball_client(base_url) {\n    var self = {}\n    var conns = {}\n\n    se"
  }
]

About this extraction

This page contains the full source code of the braid-org/braidjs GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 102 files (895.6 KB), approximately 232.5k tokens, and a symbol index with 354 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!