[
  {
    "path": ".gitignore",
    "content": "# Any certificates\ncerts/\ncertificates/\n*.pem\n\n# Database stuff\ndb.sqlite*\n\n# Builds\nbraid-bundle.js\nbuilds/\n\n# VS Code\n.vscode/\n\n# Basic Nodejs Gitignore\n# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\nlerna-debug.log*\n\n# Diagnostic reports (https://nodejs.org/api/report.html)\nreport.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json\n\n# Runtime data\npids\n*.pid\n*.seed\n*.pid.lock\n\n# Directory for instrumented libs generated by jscoverage/JSCover\nlib-cov\n\n# Coverage directory used by tools like istanbul\ncoverage\n*.lcov\n\n# nyc test coverage\n.nyc_output\n\n# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)\n.grunt\n\n# Bower dependency directory (https://bower.io/)\nbower_components\n\n# node-waf configuration\n.lock-wscript\n\n# Compiled binary addons (https://nodejs.org/api/addons.html)\nbuild/Release\n\n# Dependency directories\nnode_modules/\njspm_packages/\n\n# Snowpack dependency directory (https://snowpack.dev/)\nweb_modules/\n\n# TypeScript cache\n*.tsbuildinfo\n\n# Optional npm cache directory\n.npm\n\n# Optional eslint cache\n.eslintcache\n\n# Microbundle cache\n.rpt2_cache/\n.rts2_cache_cjs/\n.rts2_cache_es/\n.rts2_cache_umd/\n\n# Optional REPL history\n.node_repl_history\n\n# Output of 'npm pack'\n*.tgz\n\n# Yarn Integrity file\n.yarn-integrity\n\n# dotenv environment variables file\n.env\n.env.test\n\n# parcel-bundler cache (https://parceljs.org/)\n.cache\n.parcel-cache\n\n# Next.js build output\n.next\nout\n\n# Nuxt.js build / generate output\n.nuxt\ndist\n\n# Gatsby files\n.cache/\n# Comment in the public line in if your project uses Gatsby and not Next.js\n# https://nextjs.org/blog/next-9-1#public-directory-support\n# public\n\n# vuepress build output\n.vuepress/dist\n\n# Serverless directories\n.serverless/\n\n# FuseBox cache\n.fusebox/\n\n# DynamoDB Local files\n.dynamodb/\n\n# TernJS port file\n.tern-port\n\n# Stores VSCode versions used for testing VSCode 
extensions\n.vscode-test\n\n# yarn v2\n.yarn/cache\n.yarn/unplugged\n.yarn/build-state.yml\n.yarn/install-state.gz\n.pnp.*\n\n# Mike isn't into package-lock, but feel free to disagree with him\npackage-lock.json\n\n# antimatter wiki db files\nantimatter_wiki_db/\nantimatter_wiki_db.*\nantimatter_wiki.*\napril-db-backup\njan-db-backup-2025\ndb\nserver.sh\n\n# apple\n.DS_Store\n"
  },
  {
    "path": "antimatter/antimatter.js",
    "content": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') \n/// ```\n\n// v522\n\n/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.\nvar create_antimatter_crdt;\n\n/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable\n///   JSON CRDT — \"JSON\" meaning it represents an arbitrary JSON datstructure, and\n///   \"CRDT\" and \"pruneable\" having the same meaning as for sequence_crdt below. The\n///   json_crdt makes recursive use of sequence_crdt structures to represent\n///   arbitrary JSON (for instance, a map is represented with a sequence_crdt\n///   structure for each value, where the first element in the sequence is the\n///   value).\nvar create_json_crdt;\n\n/// - *sequence_crdt*: methods to manipulate a pruneable sequence CRDT —\n///   \"sequence\" meaning it represents a javascript string or array, \"CRDT\" meaning\n///   this structure can be merged with other ones, and \"pruneable\" meaning that it\n///   supports an operation to remove meta-data when it is no longer needed (whereas\n///   CRDT's often keep track of this meta-data forever).\nvar sequence_crdt = {};\n\n(() => {\n  /// # create_antimatter_crdt(send[, init])\n  ///\n  /// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).\n  ///\n  /// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a\n  ///   message over a connection registered with `subscribe`. 
The sole\n  ///   parameter to this function is a JSONafiable object that hopes to be passed to\n  ///   the `receive` method on the antimatter_crdt object at the other end of the\n  ///   connection specified in the `conn` key.\n  /// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)\n  /// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)\n  /// * `clear_timeout`: function that takes a timeout identifier and cancels it (e.g. wrapping the javascript clearTimeout)\n  /// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. \n  /// ``` js\n  /// var antimatter_crdt = create_antimatter_crdt(msg => {\n  ///     websockets[msg.conn].send(JSON.stringify(msg))\n  ///   },\n  ///   () => Date.now(),\n  ///   (func, t) => setTimeout(func, t),\n  ///   (t) => clearTimeout(t)),\n  ///.  
JSON.parse(fs.readFileSync('./antimatter.backup'))\n  /// )\n  /// ```\n  create_antimatter_crdt = (\n    send,\n    get_time,\n    set_timeout,\n    clear_timeout,\n    self\n  ) => {\n    self = create_json_crdt(self);\n    self.send = send;\n    // purposely not:\n    // self.id = self.id || Math.random().toString(36).slice(2);\n    // to accomodate an id of numeric 0\n    if (self.id === undefined) self.id = Math.random().toString(36).slice(2);\n    self.next_seq = self.next_seq || 0;\n\n    self.conns = self.conns || {};\n    self.proto_conns = self.proto_conns || {};\n    self.conn_count = self.conn_count || 0;\n\n    self.fissures = self.fissures || {};\n    self.acked_boundary = self.acked_boundary || {};\n    self.ackmes = self.ackmes || {};\n    self.forget_cbs = self.forget_cbs || {};\n\n    self.version_groups = self.version_groups || {};\n\n    self.ackme_map = self.ackme_map || {};\n    self.ackme_time_est_1 = self.ackme_time_est_1 || 1000;\n    self.ackme_time_est_2 = self.ackme_time_est_2 || 1000;\n    self.ackme_current_wait_time = self.ackme_current_wait_time || 1000;\n    self.ackme_increases_allowed = 1;\n    self.ackme_timeout = self.ackme_timeout || null;\n\n    function raw_add_version_group(version_array) {\n      let version_map = {};\n      for (let v of version_array) {\n        if (version_map[v]) continue;\n        version_map[v] = true;\n        if (self.version_groups[v]) self.version_groups[v].forEach((v) => (version_map[v] = true));\n      }\n      let version_group = Object.keys(version_map).sort();\n      version_group.forEach((v) => (self.version_groups[v] = version_group));\n      return version_group;\n    }\n\n    function get_parent_and_child_sets(children) {\n      let parent_sets = {};\n      let child_sets = {};\n      let done = {};\n      function add_set_to_sets(s, sets, mark_done) {\n        let container = { members: s };\n        let array = Object.keys(s);\n        if (array.length < 2) return;\n        for (let v 
of array) {\n          sets[v] = container;\n          if (mark_done) done[v] = true;\n        }\n      }\n      add_set_to_sets(self.current_version, parent_sets, true);\n      for (let v of Object.keys(self.T)) {\n        if (done[v]) continue;\n        done[v] = true;\n        if (!children[v]) continue;\n        let first_child_set = children[v];\n        let first_child_array = Object.keys(first_child_set);\n        let first_parent_set = self.T[first_child_array[0]];\n        let first_parent_array = Object.keys(first_parent_set);\n        if (\n          first_child_array.every((child) => {\n            let parent_set = self.T[child];\n            let parent_array = Object.keys(parent_set);\n            return (\n              parent_array.length == first_parent_array.length &&\n              parent_array.every((parent) => first_parent_set[parent])\n            );\n          }) &&\n          first_parent_array.every((parent) => {\n            let child_set = children[parent];\n            let child_array = Object.keys(child_set);\n            return (\n              child_array.length == first_child_array.length &&\n              child_array.every((child) => first_child_set[child])\n            );\n          })\n        ) {\n          add_set_to_sets(first_parent_set, parent_sets, true);\n          add_set_to_sets(first_child_set, child_sets);\n        }\n      }\n      return { parent_sets, child_sets };\n    }\n\n    function find_one_bubble(bottom, children, child_sets, restricted) {\n      let expecting = { ...bottom };\n      let seen = {};\n      Object.keys(bottom).forEach(\n        (v) =>\n          children[v] &&\n          Object.keys(children[v]).forEach((v) => (seen[v] = true))\n      );\n      let q = Object.keys(expecting);\n      let last_top = null;\n      while (q.length) {\n        cur = q.shift();\n        if (!self.T[cur]) {\n          if (!restricted) throw \"bad\";\n          else return last_top;\n        }\n        if (restricted && 
restricted[cur]) return last_top;\n\n        if (seen[cur]) continue;\n\n        if (children[cur] && !Object.keys(children[cur]).every((c) => seen[c]))\n          continue;\n        seen[cur] = true;\n        delete expecting[cur];\n\n        if (!Object.keys(expecting).length) {\n          last_top = { [cur]: true };\n          if (!restricted) return last_top;\n        }\n\n        Object.keys(self.T[cur]).forEach((p) => {\n          expecting[p] = true;\n          q.push(p);\n        });\n\n        if (\n          child_sets[cur] &&\n          Object.keys(child_sets[cur].members).every((v) => seen[v])\n        ) {\n          let expecting_array = Object.keys(expecting);\n          let parent_set = self.T[cur];\n          let parent_array = Object.keys(parent_set);\n          if (\n            expecting_array.length == parent_array.length &&\n            expecting_array.every((v) => parent_set[v])\n          ) {\n            last_top = child_sets[cur].members;\n            if (!restricted) return last_top;\n          }\n        }\n      }\n      return last_top;\n    }\n\n    function add_version_group(version_array) {\n      let version_group = raw_add_version_group(version_array);\n      if (!version_array.some((x) => self.T[x])) return version_group[0];\n\n      let children = self.get_child_map();\n      let { parent_sets, child_sets } = get_parent_and_child_sets(children);\n\n      let to_bubble = {};\n      function mark_bubble(v, bubble) {\n        if (to_bubble[v]) return;\n        to_bubble[v] = bubble;\n        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);\n      }\n\n      let bottom = Object.fromEntries(\n        version_group.filter((x) => self.T[x]).map((x) => [x, true])\n      );\n      let top = find_one_bubble(bottom, children, child_sets);\n      let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];\n      for (let v of Object.keys(top)) to_bubble[v] = bubble;\n      for (let v of Object.keys(bottom)) 
mark_bubble(v, bubble);\n\n      self.apply_bubbles(to_bubble);\n      return version_group[0];\n    }\n\n    let orig_send = send;\n    send = (x) => {\n      if (self.version_groups[x.version])\n        x.version = self.version_groups[x.version];\n      if (x.parents) {\n        x.parents = { ...x.parents };\n        Object.keys(x.parents).forEach((v) =>\n          self.version_groups[v] && self.version_groups[v].forEach((v) => (x.parents[v] = true))\n        );\n      }\n      if (Array.isArray(x.versions)) {\n        x.versions = JSON.parse(JSON.stringify(x.versions));\n        x.versions.forEach(\n          (v) =>\n            self.version_groups[v.version] &&\n            (v.version = self.version_groups[v.version])\n        );\n        x.versions.forEach((v) => {\n          Object.keys(v.parents).forEach((vv) =>\n            self.version_groups[vv] && self.version_groups[vv].forEach((vv) => (v.parents[vv] = true))\n          );\n        });\n      }\n\n      orig_send(x);\n    };\n\n    /// # antimatter_crdt.receive(message)\n    ///\n    /// Let this antimatter object \"receive\" a message from another antimatter object, presumably from its `send` callback.\n    /// ``` js\n    /// websocket.on('message', data => {\n    ///     antimatter_crdt.receive(JSON.parse(data)) });\n    /// ```\n    /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `type` entry. 
Note that each object also\n    ///   contains a `conn` entry with the id of the connection the message is sent\n    ///   over.\n    self.receive = (x) => {\n      let {\n        type,\n        version,\n        parents,\n        patches,\n        versions,\n        fissure,\n        fissures,\n        seen,\n        forget,\n        ackme,\n        peer,\n        conn,\n      } = x;\n\n      if (version && typeof version != \"string\") {\n        if (!self.T[version[0]]) version = add_version_group(version);\n        else version = version[0];\n      }\n      if (parents) {\n        parents = { ...parents };\n        Object.keys(parents).forEach((v) => {\n          if (self.version_groups[v] && self.version_groups[v][0] != v)\n            delete parents[v];\n        });\n      }\n\n      if (versions && versions.forEach) versions.forEach((v) => {\n        if (typeof v.version != \"string\") {\n          if (!self.T[v.version[0]]) v.version = add_version_group(v.version);\n          else v.version = v.version[0];\n        }\n        v.parents = { ...v.parents };\n        Object.keys(v.parents).forEach((vv) => {\n          if (self.version_groups[vv] && self.version_groups[vv][0] != vv)\n            delete v.parents[vv];\n        });\n      });\n\n      let ackme_versions_array = version\n        ? [version]\n        : versions && !Array.isArray(versions)\n        ? 
Object.keys(versions).sort()\n        : null;\n      let ackme_versions =\n        ackme_versions_array &&\n        Object.fromEntries(ackme_versions_array.map((v) => [v, true]));\n\n      if (versions && !Array.isArray(versions)) {\n        versions = { ...versions };\n        Object.keys(versions).forEach((v) => {\n          if (self.version_groups[v] && self.version_groups[v][0] != v)\n            delete versions[v];\n        });\n        if (!Object.keys(versions).length) return;\n      }\n\n      /// ## message `subscribe`\n      /// `subscribe` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.\n      /// ``` js\n      /// { type: 'subscribe',\n      ///   peer: 'SENDER_ID',\n      ///   conn: 'CONN_ID',\n      ///   parents: {'PARENT_VERSION_ID': true, ...} }\n      /// ```\n      /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.\n      if (type == \"subscribe\" || (type == \"welcome\" && peer != null)) {\n        if (self.conns[conn] != null) throw Error(\"bad\");\n        self.conns[conn] = { peer, seq: ++self.conn_count };\n      }\n\n      /// ## message `fissure`\n      ///\n      /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). 
It is also possible to send multiple `fissures` in an array.\n      /// ``` js\n      /// { type: 'fissure',\n      ///   fissure: { // or fissures: [{...}, {...}, ...],\n      ///     a: 'PEER_A_ID',\n      ///     b:  'PEER_B_ID',\n      ///     conn: 'CONN_ID',\n      ///     versions: {'VERSION_ID': true, ...},\n      ///     time: Date.now()\n      ///   },\n      ///   conn: 'CONN_ID' }\n      /// ```\n      /// Note that `time` isn't used for anything critical, as it's just wallclock time.\n      if (fissure) fissures = [fissure];\n\n      if (fissures) fissures = fissures.map((f) => {\n        f = JSON.parse(JSON.stringify(f));\n        f.t = self.conn_count;\n        return f;\n      });\n\n      if (versions && (type == \"update\" || type == \"welcome\"))\n        versions = Object.fromEntries(versions.map((v) => [v.version, v]));\n      if (version) versions = { [version]: true };\n\n      let rebased_patches = [];\n\n      let fissures_back = [];\n      let fissures_forward = [];\n      let fissures_done = {};\n\n      function copy_fissures(fs) {\n        return fs.map((f) => {\n          f = JSON.parse(JSON.stringify(f));\n          delete f.t;\n          return f;\n        });\n      }\n\n      if (fissures) {\n        let fiss_map = Object.fromEntries(\n          fissures.map((f) => [f.a + \":\" + f.b + \":\" + f.conn, f])\n        );\n        for (let [key, f] of Object.entries(fiss_map)) {\n          if (fissures_done[f.conn]) continue;\n          fissures_done[f.conn] = true;\n\n          let our_f = self.fissures[key];\n          let other_key = f.b + \":\" + f.a + \":\" + f.conn;\n          let their_other = fiss_map[other_key];\n          let our_other = self.fissures[other_key];\n\n          if (!our_f) self.fissures[key] = f;\n          if (their_other && !our_other) self.fissures[other_key] = their_other;\n\n          if (!their_other && !our_other && f.b == self.id && !self.conns[f.conn]) {\n            our_other = self.fissures[other_key] 
= {\n              ...f,\n              a: f.b,\n              b: f.a,\n              t: self.conn_count,\n            };\n          }\n\n          if (!their_other && our_other) {\n            fissures_back.push(f);\n            fissures_back.push(our_other);\n          }\n\n          if (!our_f || (their_other && !our_other)) {\n            fissures_forward.push(f);\n            if (their_other || our_other)\n              fissures_forward.push(their_other || our_other);\n          }\n        }\n      }\n\n      /// ## message `welcome`\n      /// Sent in response to a `subscribe`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `subscribe` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.\n      ///\n      /// ``` js\n      /// {\n      ///   type: 'welcome',\n      ///   versions: [\n      ///     //each version looks like an update message...\n      ///   ],\n      ///   fissures: [\n      ///     //each fissure looks as it would in a fissure message...\n      ///   ],\n      ///   parents: \n      ///     {\n      ///       //versions you must have before consuming these new versions\n      ///       'PARENT_VERSION_ID': true,\n      ///       ...\n      ///     },\n      ///   [peer: 'SENDER_ID'], // if responding to a subscribe\n      ///   conn: 'CONN_ID'\n      /// } \n      /// ```\n      let _T = {};\n      let added_versions = [];\n      if (type == \"welcome\") {\n        var versions_to_add = {};\n        let vs = Object.values(versions);\n        vs.forEach((v) => (versions_to_add[v.version] = v.parents));\n        vs.forEach((v) => {\n          if (\n            self.T[v.version] ||\n            (self.version_groups[v.version] 
&&\n              self.version_groups[v.version][0] != v.version)\n          ) {\n            remove_ancestors(v.version);\n            function remove_ancestors(v) {\n              if (versions_to_add[v]) {\n                Object.keys(versions_to_add[v]).forEach(remove_ancestors);\n                delete versions_to_add[v];\n              }\n            }\n          }\n        });\n\n        for (let v of vs) _T[v.version] = v.parents;\n\n        l1: for (var v of vs) {\n          if (versions_to_add[v.version]) {\n            let ps = Object.keys(v.parents);\n\n            if (!ps.length && Object.keys(self.T).length) continue;\n            for (p of ps) if (!self.T[p]) continue l1;\n\n            rebased_patches = rebased_patches.concat(\n              self.add_version(v.version, v.parents, v.patches, v.sort_keys)\n            );\n\n            added_versions.push(v);\n            delete _T[v.version];\n          }\n        }\n      }\n\n      if (type == \"subscribe\" || (type == \"welcome\" && peer != null)) {\n        let fissures_back = Object.values(self.fissures);\n\n        if (type == \"welcome\") {\n          var leaves = { ..._T };\n          Object.keys(_T).forEach((v) => {\n            Object.keys(_T[v]).forEach((p) => delete leaves[p]);\n          });\n\n          let f = {\n            a: self.id,\n            b: peer,\n            conn: \"-\" + conn,\n            versions: Object.fromEntries(\n              added_versions\n                .concat(Object.keys(leaves).map((v) => versions[v]))\n                .map((v) => [v.version, true])\n            ),\n            time: get_time(),\n            t: self.conn_count,\n          };\n          if (Object.keys(f.versions).length) {\n            let key = f.a + \":\" + f.b + \":\" + f.conn;\n            self.fissures[key] = f;\n            fissures_back.push(f);\n            fissures_forward.push(f);\n          }\n        }\n\n        send({\n          type: \"welcome\",\n          versions: 
self.generate_braid(parents || versions),\n          fissures: copy_fissures(fissures_back),\n          parents:\n            parents &&\n            Object.keys(parents).length &&\n            self.get_leaves(self.ancestors(parents, true)),\n          ...(type == \"subscribe\" ? { peer: self.id } : {}),\n          conn,\n        });\n      } else if (fissures_back.length) {\n        send({\n          type: \"fissure\",\n          fissures: copy_fissures(fissures_back),\n          conn,\n        });\n      }\n\n      /// ## message `forget`\n      /// Used to disconnect without creating a fissure, presumably meaning the sending peer doesn't plan to make any edits while they're disconnected.\n      /// ``` js\n      /// {type: 'forget', conn: 'CONN_ID'}\n      /// ```\n      if (type == \"forget\") {\n        if (self.conns[conn] == null) throw Error(\"bad\");\n        send({ type: \"ack\", forget: true, conn });\n\n        delete self.conns[conn];\n        delete self.proto_conns[conn];\n      }\n\n      /// ## message forget `ack` \n      /// Sent in response to `forget`.. so they know we forgot them.\n      /// ``` js\n      /// {type: 'ack', forget: true, conn: 'CONN_ID'}\n      /// ```\n      if (type == \"ack\" && forget) {\n        self.forget_cbs[conn]();\n      }\n\n      /// ## message `update`\n      /// Sent to alert peers about a change in the document. The change is represented as a version, with a unique id, a set of parent versions (the most recent versions known before adding this version), and an array of patches, where the offsets in the patches do not take into account the application of other patches in the same array.\n      /// ``` js\n      /// { type: 'update',\n      ///   version: 'VERSION_ID',\n      ///   parents: {'PARENT_VERSION_ID': true, ...},\n      ///   patches: [ {range: '.json.path.a.b', content: 42}, ... 
],\n      ///   conn: 'CONN_ID' }\n      /// ```\n      if (type == \"update\") {\n        if (conn == null || !self.T[version]) {\n          let ps = Object.keys(parents);\n\n          if (!ps.length && Object.keys(self.T).length) return;\n          for (p of ps) if (!self.T[p]) return;\n\n          rebased_patches = self.add_version(version, parents, patches);\n\n          for (let c of Object.keys(self.conns))\n            if (c != conn)\n              send({ type: \"update\", version, parents, patches, ackme, conn: c });\n        }\n      }\n\n      /// ## message `ackme`\n      /// Sent for pruning purposes, to try and establish whether everyone has seen the most recent versions. Note that an `update` message is treated as a `ackme` message for the version in the update.\n      /// ``` js\n      /// { type: 'ackme',\n      ///   version: 'ACKME_ID',\n      ///   versions: {'VERSION_ID_A': true, ...},\n      ///   conn: 'CONN_ID' }\n      /// ```\n      if (type == \"ackme\" || type == \"update\") {\n        if (!Object.keys(versions).every((v) => self.T[v])) return;\n\n        if (\n          self.ackme_timeout &&\n          ackme_versions_array.length ==\n            Object.keys(self.current_version).length &&\n          ackme_versions_array.every((x) => self.current_version[x])\n        ) {\n          clear_timeout(self.ackme_timeout);\n          self.ackme_timeout = null;\n        }\n\n        let m = self.ackmes[ackme];\n        if (!m) {\n          m = self.ackmes[ackme] = {\n            id: ackme,\n            origin: conn,\n            count: Object.keys(self.conns).length - (conn != null ? 
1 : 0),\n            versions: ackme_versions,\n            seq: self.conn_count,\n            time: get_time(),\n          };\n          m.orig_count = m.count;\n          m.real_ackme = type == \"ackme\";\n          m.key = JSON.stringify(Object.keys(m.versions).sort());\n          self.ackme_map[m.key] = self.ackme_map[m.key] || {};\n          let before = Object.keys(self.ackme_map[m.key]).length;\n          self.ackme_map[m.key][m.id] = true;\n          let after = Object.keys(self.ackme_map[m.key]).length;\n          if (before == 1 && after == 2 && self.ackme_increases_allowed > 0) {\n            self.ackme_current_wait_time *= 2;\n            self.ackme_increases_allowed--;\n          }\n\n          if (type == \"ackme\")\n            for (let c of Object.keys(self.conns))\n              if (c != conn)\n                send({\n                  type: \"ackme\",\n                  ackme,\n                  versions: ackme_versions,\n                  conn: c,\n                });\n        } else if (m.seq < self.conns[conn].seq) {\n          send({\n            type: \"ack\",\n            seen: \"local\",\n            ackme,\n            versions: ackme_versions,\n            conn,\n          });\n          return;\n        } else m.count--;\n        check_ackme_count(ackme);\n      }\n\n      /// ## message local `ack`\n      /// Sent in response to `update`, but not right away; a peer will first send the `update` over all its other connections, and only after they have all responded with a local `ack` – and we didn't see a `fissure` message while waiting – will the peer send a local `ack` over the originating connection.\n      /// ``` js\n      /// {type: 'ack', seen: 'local', version: 'VERSION_ID', conn: 'CONN_ID'}\n      /// ```\n      if (type == \"ack\" && seen == \"local\") {\n        let m = self.ackmes[ackme];\n        if (!m || m.cancelled) return;\n        m.count--;\n        check_ackme_count(ackme);\n      }\n      function 
check_ackme_count(ackme) {\n        let m = self.ackmes[ackme];\n        if (m && m.count === 0 && !m.cancelled) {\n          m.time2 = get_time();\n          if (m.orig_count > 0) {\n            let t = m.time2 - m.time;\n            let weight = 0.1;\n            self.ackme_time_est_1 =\n              weight * t + (1 - weight) * self.ackme_time_est_1;\n          }\n          if (m.origin != null) {\n            if (self.conns[m.origin])\n              send({\n                type: \"ack\",\n                seen: \"local\",\n                ackme,\n                versions: ackme_versions,\n                conn: m.origin,\n              });\n          } else add_full_ack_leaves(ackme);\n        }\n      }\n\n      /// ## message global `ack`\n      /// Sent after an originating peer has received a local `ack` over all its connections, or after any peer receives a global `ack`, so that everyone may come to know that this version has been seen by everyone in this peer group.\n      /// ``` js\n      /// {type: 'ack', seen: 'global', version: 'VERSION_ID', conn: 'CONN_ID'}\n      /// ```\n      if (type == \"ack\" && seen == \"global\") {\n        let m = self.ackmes[ackme];\n\n        if (!m || m.cancelled) return;\n\n        let t = get_time() - m.time2;\n        let weight = 0.1;\n        self.ackme_time_est_2 =\n          weight * t + (1 - weight) * self.ackme_time_est_2;\n\n        if (m.real_ackme && Object.keys(self.ackme_map[m.key]).length == 1) {\n          self.ackme_current_wait_time *= 0.8;\n        }\n\n        add_full_ack_leaves(ackme, conn);\n      }\n      function add_full_ack_leaves(ackme, conn) {\n        let m = self.ackmes[ackme];\n        if (!m || m.cancelled) return;\n        m.cancelled = true;\n\n        for (let [c, cc] of Object.entries(self.conns))\n          if (c != conn && cc.seq <= m.seq)\n            send({\n              type: \"ack\",\n              seen: \"global\",\n              ackme,\n              versions: ackme_versions,\n 
             conn: c,\n            });\n\n        for (let v of Object.keys(m.versions)) {\n          if (!self.T[v]) continue;\n          let marks = {};\n          let f = (v) => {\n            if (!marks[v]) {\n              marks[v] = true;\n              delete self.acked_boundary[v];\n              Object.keys(self.T[v]).forEach(f);\n            }\n          };\n          f(v);\n          self.acked_boundary[v] = true;\n        }\n        prune(false, m.seq);\n      }\n\n      if (added_versions.length || fissures_forward.length) {\n        for (let c of Object.keys(self.conns))\n          if (c != conn)\n            send({\n              type: added_versions.length ? \"welcome\" : \"fissure\",\n              ...(added_versions.length ? { versions: added_versions } : {}),\n              fissures: copy_fissures(fissures_forward),\n              conn: c,\n            });\n      }\n\n      if (fissures_forward.length) resolve_fissures();\n\n      if (\n        !self.ackme_timeout &&\n        type != \"update\" &&\n        type != \"ackme\" &&\n        prune(true)\n      ) {\n        if (!self.ackme_current_wait_time) {\n          self.ackme_current_wait_time =\n            4 * (self.ackme_time_est_1 + self.ackme_time_est_2);\n        }\n\n        let t = Math.random() * self.ackme_current_wait_time;\n\n        self.ackme_timeout = set_timeout(() => {\n          self.ackme_increases_allowed = 1;\n          self.ackme_timeout = null;\n          if (prune(true)) self.ackme();\n        }, t);\n      }\n\n      if (type == \"welcome\" && peer == null && prune(true, null, true))\n        self.ackme();\n\n      return rebased_patches;\n    };\n\n    /// # antimatter_crdt.subscribe(conn)\n    ///\n    /// Register a new connection with id `conn` – triggers this antimatter_crdt object to send a `subscribe` message over the given connection. 
\n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.subscribe('connection_to_bob')\n    /// ```\n    self.subscribe = (conn) => {\n      self.proto_conns[conn] = true;\n      send({ type: \"subscribe\", peer: self.id, conn });\n    };\n\n    /// # antimatter_crdt.forget(conn)\n    ///\n    /// Disconnect the given connection without creating a fissure – we don't need to reconnect with them.. it seems.. if we do, then we need to call `disconnect` instead, which will create a fissure allowing us to reconnect.\n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.forget('connection_to_bob')\n    /// ```\n    self.forget = async (conn) => {\n      await new Promise((done) => {\n        if (self.conns[conn] != null) {\n          self.forget_cbs[conn] = done;\n          send({ type: \"forget\", conn });\n        }\n        self.disconnect(conn, false);\n      });\n    };\n\n    /// # antimatter_crdt.disconnect(conn)\n    ///\n    /// If we detect that a connection has closed, let the antimatter_crdt object know by calling this method with the given connection id – this will create a fissure so we can reconnect with whoever was on the other end of the connection later on. \n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.disconnect('connection_to_bob')\n    /// ```\n    self.disconnect = (conn, fissure = true) => {\n      if (self.conns[conn] == null && !self.proto_conns[conn]) return;\n      delete self.proto_conns[conn];\n\n      if (self.conns[conn]) {\n        let peer = self.conns[conn].peer;\n        delete self.conns[conn];\n\n        if (fissure) {\n          fissure = create_fissure(peer, conn);\n          if (fissure) self.receive({ type: \"fissure\", fissure });\n        }\n      }\n    };\n\n    /// # antimatter_crdt.update(...patches)\n    ///\n    /// Modify this antimatter_crdt object by applying the given patches. Each patch looks like `{range: '.life.meaning', content: 42}`. 
Calling this method will trigger calling the `send` callback to let our peers know about this change. 
    ///
    /// ``` js
    /// antimatter_crdt.update({
    ///   range: '.life.meaning',
    ///   content: 42
    /// })
    /// ```
    self.update = (...patches) => {
      // Versions are "<sequence-number>@<peer-id>", globally unique per peer.
      var version = `${self.next_seq++}@${self.id}`;
      // Route through self.receive so local edits take the same code path
      // as remote ones (parents = everything we currently know about).
      self.receive({
        type: "update",
        version,
        parents: { ...self.current_version },
        patches,
        ackme: Math.random().toString(36).slice(2),
      });
      return version;
    };

    /// # antimatter_crdt.ackme()
    ///
    /// Initiate sending a `ackme` message to try and establish whether certain versions can be pruned. 
    ///
    /// ``` js
    /// antimatter_crdt.ackme()
    /// ```
    self.ackme = () => {
      let versions = { ...self.current_version };
      // Expand each current version to its whole version-group, so grouped
      // (already-bubbled) versions are acked together.
      Object.keys(versions).forEach((v) =>
        self.version_groups[v] && self.version_groups[v].forEach((v) => (versions[v] = true))
      );

      // Random token identifying this ack round.
      let ackme = Math.random().toString(36).slice(2);
      self.receive({ type: "ackme", ackme, versions });
      return ackme;
    };

    // Mark every in-flight ack round as cancelled (used when fissures
    // resolve and previously-acked versions become uncertain again).
    function cancel_ackmes() {
      for (let m of Object.values(self.ackmes)) m.cancelled = true;
    }

    // Build a fissure record toward `peer` over `conn`, capturing every
    // version that is not yet safely acknowledged (not an ancestor of the
    // acked boundary, or on the boundary itself). Returns undefined when
    // there is nothing at risk.
    function create_fissure(peer, conn) {
      let ack_versions = self.ancestors(self.acked_boundary);

      let entries = Object.keys(self.T)
        .filter((v) => !ack_versions[v] || self.acked_boundary[v])
        .map((v) => [v, true]);
      if (!entries.length) return;
      let versions = Object.fromEntries(entries);
      return { a: self.id, b: peer, conn, versions, time: get_time() };
    }

    // When both halves of a fissure (a:b:conn and b:a:conn) are present,
    // their versions are no longer at risk: clear them, and retract the
    // acked boundary below any version that was "unfissured".
    function resolve_fissures() {
      let unfissured = {};

      Object.entries(self.fissures).forEach(([fk, f]) => {
        var other_key = f.b + ":" + f.a + ":" + f.conn;
        var other = self.fissures[other_key];
        if (other) {
          if (Object.keys(f.versions).length) {
            for (let v of Object.keys(f.versions)) unfissured[v] = true;
            self.fissures[fk] = { ...f, versions: {} };
          }
          if (Object.keys(other.versions).length) {
            for (let v of Object.keys(other.versions)) unfissured[v] = true;
            self.fissures[other_key] = { ...other, versions: {} };
          }
        }
      });

      if (Object.keys(unfissured).length) {
        cancel_ackmes();

        // Remove every descendant of an unfissured version from the acked
        // set, then recompute the boundary as the leaves of what remains.
        let ack_versions = self.ancestors(self.acked_boundary);
        let unfissured_descendants = self.descendants(unfissured, true);
        for (let un of Object.keys(unfissured_descendants))
          if (ack_versions[un]) delete ack_versions[un];
        self.acked_boundary = self.get_leaves(ack_versions);
      }
    }

    // Core garbage collection. With just_checking=true it only reports
    // whether pruning WOULD do anything (and must not mutate fissures);
    // otherwise it collapses prunable version "bubbles" via apply_bubbles
    // and cleans up ack bookkeeping.
    function prune(just_checking, t, just_versions) {
      if (just_checking) t = Infinity;

      // Work on a copy when only checking, so deletions are not observable.
      let fissures = just_checking ? { ...self.fissures } : self.fissures;

      Object.entries(fissures).forEach((x) => {
        var other_key = x[1].b + ":" + x[1].a + ":" + x[1].conn;
        var other = fissures[other_key];
        // NOTE(review): fissures are created with a `time` field (see
        // create_fissure), but this compares `.t`, which looks like it is
        // always undefined — making this pair-deletion a no-op. Verify
        // against the wire format / callers before changing.
        if (other && x[1].t <= t && other.t <= t) {
          delete fissures[x[0]];
          delete fissures[other_key];
        }
      });

      // Expire fissures older than the configured lifetime.
      if (self.fissure_lifetime != null) {
        var now = get_time();
        Object.entries(fissures).forEach(([k, f]) => {
          if (f.time == null) f.time = now;
          if (f.time <= now - self.fissure_lifetime) {
            delete fissures[k];
          }
        });
      }

      if (
        just_checking &&
        !just_versions &&
        Object.keys(fissures).length < Object.keys(self.fissures).length
      )
        return true;

      // Versions still referenced by a live fissure must not be pruned.
      var restricted = {};

      Object.values(fissures).forEach((f) => {
        Object.keys(f.versions).forEach((v) => (restricted[v] = true));
      });

      if (!just_checking) {
        // In a real prune, also protect everything not yet acknowledged.
        var acked = self.ancestors(self.acked_boundary);
        Object.keys(self.T).forEach((x) => {
          if (!acked[x]) restricted[x] = true;
        });
      }

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      // to_bubble maps version -> [bottom, top] rename-bubble.
      let to_bubble = {};
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }
      let visited = {};
      // Walk the DAG from the current leaves looking for collapsible
      // bubbles; in just_checking mode, returns true on the first find.
      function f(cur) {
        if (!self.T[cur] || visited[cur]) return;
        visited[cur] = true;

        if (
          to_bubble[cur] == null &&
          parent_sets[cur] &&
          !parent_sets[cur].done
        ) {
          parent_sets[cur].done = true;
          let bottom = parent_sets[cur].members;
          let top = find_one_bubble(bottom, children, child_sets, restricted);
          if (top) {
            if (just_checking) return true;
            let bottom_array = Object.keys(bottom).sort();
            let top_array = Object.keys(top);
            raw_add_version_group(bottom_array);
            let bubble = [bottom_array[0], top_array[0]];
            for (let v of top_array) to_bubble[v] = bubble;
            for (let v of bottom_array) mark_bubble(v, bubble);
          }
        }
        if (to_bubble[cur] == null) {
          // Try a single-version bubble rooted at cur.
          let top = find_one_bubble(
            { [cur]: true },
            children,
            child_sets,
            restricted
          );
          if (top && !top[cur]) {
            if (just_checking) return true;
            let bubble = [cur, Object.keys(top)[0]];
            for (let v of Object.keys(top)) to_bubble[v] = bubble;
            mark_bubble(bubble[0], bubble);
          } else {
            to_bubble[cur] = [cur, cur];
          }
        }
        return Object.keys(
          self.T[cur] || self.T[self.version_groups[cur][0]]
        ).some(f);
      }
      if (Object.keys(self.current_version).some(f) && just_checking)
        return true;

      self.apply_bubbles(to_bubble);

      // Drop ack rounds that now reference vanished versions.
      for (let [k, m] of Object.entries(self.ackmes)) {
        let vs = Object.keys(m.versions);
        if (
          !vs.length ||
          !vs.every((v) => self.T[v] || self.version_groups[v])
        ) {
          delete self.ackmes[k];
          delete self.ackme_map[m.key][m.id];
          if (!Object.keys(self.ackme_map[m.key]).length)
            delete self.ackme_map[m.key];
        }
      }

      // Drop version-groups whose representative no longer exists.
      for (let [v, vs] of Object.entries(self.version_groups)) {
        if (!self.T[vs[0]]) delete self.version_groups[v];
      }
    }

    return self;
  };

  /// ## create_json_crdt([init])
  ///
  /// Create a new `json_crdt` object (or start with `init`, and add stuff to that). 
  ///
  /// ``` js
  /// var json_crdt = create_json_crdt()
  /// ``` 
  create_json_crdt = (self) => {
    self = self || {};
    // S: the CRDT state tree; T: version DAG (version -> parent set).
    if (self.S === undefined) self.S = null;
    self.T = self.T || {};
    if (self.root_version === undefined) self.root_version = null;
    self.current_version = self.current_version || {};
    self.version_cache = self.version_cache || {};

    // "lit" nodes wrap plain JSON values; these helpers convert between
    // wrapped and unwrapped forms.
    let is_lit = (x) => !x || typeof x != "object" || x.t == "lit";
    let get_lit = (x) => (x && typeof x == "object" && x.t == "lit" ? x.S : x);
    let make_lit = (x) => (x && typeof x == "object" ? { t: "lit", S: x } : x);

    /// # json_crdt.read()
    ///
    /// Returns an instance of the `json` object represented by this json_crdt data-structure. 
\n    ///\n    /// ``` js\n    /// console.log(json_crdt.read())\n    /// ```\n    self.read = (is_anc) => {\n      if (!is_anc) is_anc = () => true;\n\n      return raw_read(self.S, is_anc);\n    };\n\n    function raw_read(x, is_anc) {\n      if (x && typeof x == \"object\") {\n        if (x.t == \"lit\") return JSON.parse(JSON.stringify(x.S));\n        if (x.t == \"val\")\n          return raw_read(sequence_crdt.get(x.S, 0, is_anc), is_anc);\n        if (x.t == \"obj\") {\n          var o = {};\n          Object.entries(x.S).forEach(([k, v]) => {\n            var x = raw_read(v, is_anc);\n            if (x != null) o[k] = x;\n          });\n          return o;\n        }\n        if (x.t == \"arr\") {\n          var a = [];\n          sequence_crdt.traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted)\n                node.elems.forEach((e) => a.push(raw_read(e, is_anc)));\n            },\n            true\n          );\n          return a;\n        }\n        if (x.t == \"str\") {\n          var s = [];\n          sequence_crdt.traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted) s.push(node.elems);\n            },\n            true\n          );\n          return s.join(\"\");\n        }\n        throw Error(\"bad\");\n      }\n      return x;\n    }\n\n    /// # json_crdt.generate_braid(versions)\n    ///\n    /// Returns an array of `update` messages that each look like this: `{version, parents, patches, sort_keys}`, such that if we pass all these messages to `antimatter_crdt.receive()`, we'll reconstruct the data in this `json_crdt` data-structure, assuming the recipient already has the given `versions` (each version is represented as a key in an object, and each value is `true`).\n    ///\n    /// ``` js\n    /// json_crdt.generate_braid({\n    ///   alice2: true, \n    ///   bob3: true\n    /// })\n    
    /// ```
    self.generate_braid = (versions) => {
      // Everything the recipient already knows (plus ancestors) is skipped.
      var anc =
        versions && Object.keys(versions).length
          ? self.ancestors(versions, true)
          : {};
      var is_anc = (x) => anc[x];

      if (Object.keys(self.T).length === 0) return [];

      // Reuse cached update messages when present; otherwise regenerate and
      // cache them as a side effect of the map.
      return Object.entries(self.version_cache)
        .filter((x) => !is_anc(x[0]))
        .map(([version, update_message]) => {
          return (self.version_cache[version] =
            update_message || generate_update_message(version));
        });

      // Reconstruct the {version, parents, patches, sort_keys} message that
      // would recreate `version`'s effect on a fresh replica.
      function generate_update_message(version) {
        // A parentless (root) version is just a snapshot of its own view.
        if (!Object.keys(self.T[version]).length) {
          return {
            version,
            parents: {},
            patches: [{ range: "", content: self.read((v) => v == version) }],
          };
        }

        var is_lit = (x) => !x || typeof x !== "object" || x.t === "lit";
        var get_lit = (x) =>
          x && typeof x === "object" && x.t === "lit" ? x.S : x;

        // Ancestors of `version` (excluding itself) define the base state
        // the patches are expressed against.
        var ancs = self.ancestors({ [version]: true });
        delete ancs[version];
        var is_anc = (x) => ancs[x];
        var path = [];
        var patches = [];
        var sort_keys = {};
        recurse(self.S);
        // Walk the state tree, collecting splices contributed by `version`
        // at each node; `path` tracks the JSON-path prefix.
        function recurse(x) {
          if (is_lit(x)) {
          } else if (x.t === "val") {
            sequence_crdt
              .generate_braid(x.S, version, is_anc, raw_read)
              .forEach((s) => {
                if (s[2].length) {
                  patches.push({ range: path.join(""), content: s[2][0] });
                  if (s[3]) sort_keys[patches.length - 1] = s[3];
                }
              });
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach(recurse);
            });
          } else if (x.t === "arr") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
            var i = 0;
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach((e) => {
                path.push(`[${i++}]`);
                recurse(e);
                path.pop();
              });
            });
          } else if (x.t === "obj") {
            Object.entries(x.S).forEach((e) => {
              path.push("[" + JSON.stringify(e[0]) + "]");
              recurse(e[1]);
              path.pop();
            });
          } else if (x.t === "str") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
          }
        }

        return {
          version,
          parents: { ...self.T[version] },
          patches,
          sort_keys,
        };
      }
    };

    /// # json_crdt.apply_bubbles(to_bubble)
    ///
    /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. 
We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
    ///
    /// ``` js 
    /// json_crdt.apply_bubbles({
    ///   alice4: ['bob5', 'alice4'], 
    ///   bob5: ['bob5', 'alice4']
    /// }) 
    /// ```
    self.apply_bubbles = (to_bubble) => {
      // First pass: push the renames into the sequence CRDTs and collapse
      // fully-settled subtrees back into plain "lit" values where possible.
      function recurse(x) {
        if (is_lit(x)) return x;
        if (x.t == "val") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.slice(0, 1).map(recurse);
            },
            true
          );
          // A single-element, history-free val collapses to its literal.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.length == 1 &&
            is_lit(x.S.elems[0])
          )
            return x.S.elems[0];
          return x;
        }
        if (x.t == "arr") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.map(recurse);
            },
            true
          );
          // An array with no branches, no deletions, and only literal
          // elements collapses to a literal array.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.every(is_lit) &&
            !Object.keys(x.S.deleted_by).length
          )
            return { t: "lit", S: x.S.elems.map(get_lit) };
          return x;
        }
        if (x.t == "obj") {
          Object.entries(x.S).forEach((e) => {
            var y = (x.S[e[0]] = recurse(e[1]));
            if (y == null) delete x.S[e[0]];
          });
          if (Object.values(x.S).every(is_lit)) {
            var o = {};
            Object.entries(x.S).forEach((e) => (o[e[0]] = get_lit(e[1])));
            return { t: "lit", S: o };
          }
          return x;
        }
        if (x.t == "str") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            !Object.keys(x.S.deleted_by).length
          )
            return x.S.elems;
          return x;
        }
      }
      self.S = recurse(self.S);

      // Second pass: rewrite the version DAG itself — rename versions to
      // their bubble bottoms and reparent onto the bubble tops.
      Object.entries(to_bubble).forEach(([version, bubble]) => {
        if (!self.T[version]) return;

        if (self.my_where_are_they_now)
          self.my_where_are_they_now[version] = bubble[0];

        // The bubble top's parents become the surviving version's parents.
        if (version === bubble[1]) self.T[bubble[0]] = self.T[bubble[1]];

        if (version !== bubble[0]) {
          if (self.root_version == version) self.root_version = bubble[0];
          delete self.T[version];
          delete self.version_cache[version];
          delete self.acked_boundary[version];
          delete self.current_version[version];
          if (
            self.version_groups[version] &&
            self.version_groups[version][0] == version
          ) {
            for (let v of self.version_groups[version]) {
              delete self.version_groups[v];
            }
          }
          // Scrub the renamed version from every remaining parent set
          // (copy-on-write so shared parent objects aren't mutated).
          for (let [k, parents] of Object.entries(self.T)) {
            self.T[k] = parents = { ...parents };
            for (let p of Object.keys(parents)) {
              if (p == version) delete parents[p];
            }
          }
        } else self.version_cache[version] = null;
      });

      // If history has fully converged (one leaf, fully acked, no
      // fissures), flatten everything into a single literal snapshot.
      var leaves = Object.keys(self.current_version);
      var acked_boundary = Object.keys(self.acked_boundary);
      var fiss = Object.keys(self.fissures);
      if (
        leaves.length == 1 &&
        acked_boundary.length == 1 &&
        leaves[0] == acked_boundary[0] &&
        fiss.length == 0
      ) {
        self.T = { [leaves[0]]: {} };
        self.S = make_lit(self.read());
      }
    };

    /// # json_crdt.add_version(version, parents, patches[, sort_keys])
    ///
    /// The main method for modifying a `json_crdt` data structure. 
    ///
    /// * `version`: Unique string associated with this edit. 
    /// * `parents`: A set of versions that this version is aware of, represented as a map with versions as keys, and values of `true`. 
    /// * `patches`: An array of patches, each patch looks like this `{range: '.life.meaning', content: 42}`. 
    /// * `sort_keys`: (optional) An object where each key is an index, and the value is a sort_key to use with the patch at the given index in the `patches` array – a sort_key overrides the version for a patch for the purposes of sorting. This can be useful after doing some pruning. 
    ///
    /// ``` js
    /// json_crdt.add_version(
    ///   'alice6', 
    ///   {
    ///     alice5: true, 
    ///     bob7: true
    ///   }, 
    ///   [
    ///     {
    ///       range: '.a.b', 
    ///       content: 'c'
    ///     }
    ///   ]
    /// )
    /// ``` 
    self.add_version = (version, parents, patches, sort_keys) => {
      // Idempotent: re-adding a known version is a no-op.
      if (self.T[version]) return;

      if (self.root_version == null) self.root_version = version;

      self.T[version] = { ...parents };

      // Cache the raw message so generate_braid can replay it later.
      self.version_cache[version] = JSON.parse(
        JSON.stringify({
          version,
          parents,
          patches,
          sort_keys,
        })
      );

      // This version supersedes its parents as a current leaf.
      Object.keys(parents).forEach((k) => {
        if (self.current_version[k]) delete self.current_version[k];
      });
      self.current_version[version] = true;

      if (!sort_keys) sort_keys = {};

      // A parentless version resets the whole document to a literal.
      if (!Object.keys(parents).length) {
        var parse = self.parse_patch(patches[0]);
        self.S = make_lit(parse.value);
        return patches;
      }

      let is_anc;
      if (parents == self.current_version) {
        is_anc = (_version) => _version != version;
      } else {
        let ancs = self.ancestors(parents);
        is_anc = (_version) => ancs[_version];
      }

      // Each patch is applied at its resolved location; splices may get
      // rebased against concurrent edits, and the rebased form is returned.
      var rebased_patches = [];
      patches.forEach((patch, i) => {
        var sort_key = sort_keys[i];
        var parse = self.parse_patch(patch);
        var cur = resolve_path(parse);
        if (!parse.slice) {
          // Whole-value replacement (or deletion) of a "val" register.
          if (cur.t != "val") throw Error("bad");
          var len = sequence_crdt.length(cur.S, is_anc);
          sequence_crdt.add_version(
            cur.S,
            version,
            [[0, len, [parse.delete ? null : make_lit(parse.value)], sort_key]],
            is_anc
          );
          rebased_patches.push(patch);
        } else {
          if (typeof parse.value === "string" && cur.t !== "str")
            throw Error(
              `Cannot splice string ${JSON.stringify(
                parse.value
              )} into non-string`
            );
          if (parse.value instanceof Array && cur.t !== "arr")
            throw Error(
              `Cannot splice array ${JSON.stringify(
                parse.value
              )} into non-array`
            );
          if (parse.value instanceof Array)
            parse.value = parse.value.map((x) => make_lit(x));

          // Negative slice indices (including -0) count from the end.
          var r0 = parse.slice[0];
          var r1 = parse.slice[1];
          if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {
            let len = sequence_crdt.length(cur.S, is_anc);
            if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0;
            if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1;
          }

          var rebased_splices = sequence_crdt.add_version(
            cur.S,
            version,
            [[r0, r1 - r0, parse.value, sort_key]],
            is_anc
          );
          for (let rebased_splice of rebased_splices)
            rebased_patches.push({
              range: `${parse.path
                .map((x) => `[${JSON.stringify(x)}]`)
                .join("")}[${rebased_splice[0]}:${
                rebased_splice[0] + rebased_splice[1]
              }]`,
              content: rebased_splice[2],
            });
        }
      });

      // Walk parse.path down the state tree, upgrading literals to CRDT
      // nodes ("val"/"obj"/"arr"/"str") in place as needed so the target
      // location is mutable. Returns the node the patch applies to.
      function resolve_path(parse) {
        var cur = self.S;
        if (!cur || typeof cur != "object" || cur.t == "lit")
          cur = self.S = {
            t: "val",
            S: sequence_crdt.create_node(self.root_version, [cur]),
          };
        // prev_S/prev_i remember where `cur` lives so an upgraded node can
        // be written back into its parent sequence.
        var prev_S = null;
        var prev_i = 0;
        for (var i = 0; i < parse.path.length; i++) {
          var key = parse.path[i];
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          if (cur.t == "lit") {
            // Upgrade a literal container to its CRDT equivalent.
            var new_cur = {};
            if (cur.S instanceof Array) {
              new_cur.t = "arr";
              new_cur.S = sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              );
            } else {
              if (typeof cur.S != "object") throw Error("bad");
              new_cur.t = "obj";
              new_cur.S = {};
              Object.entries(cur.S).forEach(
                (e) => (new_cur.S[e[0]] = make_lit(e[1]))
              );
            }
            cur = new_cur;
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          }
          if (cur.t == "obj") {
            let x = cur.S[key];
            if (!x || typeof x != "object" || x.t == "lit")
              x = cur.S[key] = {
                t: "val",
                S: sequence_crdt.create_node(self.root_version, [
                  x == null ? null : x,
                ]),
              };
            cur = x;
          } else if (i == parse.path.length - 1 && !parse.slice) {
            // Final numeric index into a sequence: treat as a 1-wide slice.
            parse.slice = [key, key + 1];
            parse.value = cur.t == "str" ? parse.value : [parse.value];
          } else if (cur.t == "arr") {
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = key), is_anc);
          } else throw Error("bad");
        }
        if (parse.slice) {
          if (cur.t == "val")
            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);
          if (typeof cur == "string") {
            cur = {
              t: "str",
              S: sequence_crdt.create_node(self.root_version, cur),
            };
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          } else if (cur.t == "lit") {
            if (!(cur.S instanceof Array)) throw Error("bad");
            cur = {
              t: "arr",
              S: sequence_crdt.create_node(
                self.root_version,
                cur.S.map((x) => make_lit(x))
              ),
            };
            sequence_crdt.update(prev_S, prev_i, cur, is_anc);
          }
        }
        return cur;
      }

      return rebased_patches;
    };

    /// # json_crdt.get_child_map()
    ///
    /// Returns a map where each key is a version, and each value is a set of child versions, represented as a map with versions as keys, and values of `true`.
    ///
    /// ``` js
    /// json_crdt.get_child_map()
    /// ``` 
    self.get_child_map = () => {
      // Invert the parent relation stored in self.T.
      let children = {};
      Object.entries(self.T).forEach(([v, parents]) => {
        Object.keys(parents).forEach((parent) => {
          if (!children[parent]) children[parent] = {};
          children[parent][v] = true;
        });
      });
      return children;
    };

    /// # json_crdt.ancestors(versions, ignore_nonexistent=false)
    ///
    /// Gather `versions` and all their ancestors into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. 
If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.ancestors({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.ancestors = (versions, ignore_nonexistent) => {\n      var result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(self.T[version]).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n\n    /// # json_crdt.descendants(versions, ignore_nonexistent=false)\n    ///\n    /// Gather `versions` and all their descendants into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.descendants({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.descendants = (versions, ignore_nonexistent) => {\n      let children = self.get_child_map();\n      var result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(children[version] || {}).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n\n    /// # json_crdt.get_leaves(versions)\n    ///\n    /// Returns a set of versions from `versions` which don't also have a child in `versions`. 
`versions` is itself a set of versions, represented as an object with version keys and `true` values, and the return value is represented the same way.\n    self.get_leaves = (versions) => {\n      var leaves = { ...versions };\n      Object.keys(versions).forEach((v) => {\n        Object.keys(self.T[v]).forEach((p) => delete leaves[p]);\n      });\n      return leaves;\n    };\n\n    /// # json_crdt.parse_patch(patch)\n    ///\n    /// Takes a patch in the form `{range, content}`, and returns an object of the form `{path: [...], [slice: [...]], [delete: true], content}`; basically calling `parse_json_path` on `patch.range`, and adding `patch.content` along for the ride.\n    self.parse_patch = (patch) => {\n      let x = self.parse_json_path(patch.range);\n      x.value = patch.content;\n      return x;\n    };\n\n    /// # json_crdt.parse_json_path(json_path)\n    ///\n    /// Parses the string `json_path` into an object like: `{path: [...], [slice: [...]], [delete: true]}`. \n    ///\n    /// * `a.b[3]` --> `{path: ['a', 'b', 3]}`\n    /// * `a.b[3:5]` --> `{path: ['a', 'b'], slice: [3, 5]}`\n    /// * `delete a.b` --> `{path: ['a', 'b'], delete: true}`\n    ///\n    /// ``` js\n    /// console.log(json_crdt.parse_json_path('a.b.c'))\n    /// ```\n    self.parse_json_path = (json_path) => {\n      var ret = { path: [] };\n      var re =\n        /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|\"(\\\\\"|[^\"])*\")\\]/g;\n      var m;\n      while ((m = re.exec(json_path))) {\n        if (m[1]) ret.delete = true;\n        else if (m[2]) ret.path.push(m[2]);\n        else if (m[3] && m[5])\n          ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))];\n        else if (m[3]) ret.path.push(JSON.parse(m[3]));\n      }\n      return ret;\n    };\n\n    return self;\n  };\n\n  /// # sequence_crdt.create_node(version, elems, [end_cap, sort_key])\n  ///\n  /// Creates a node for a `sequence_crdt` sequence CRDT with the given properties. 
The resulting node will look like this:\n  ///\n  /// ``` js\n  /// {\n  ///   version, // globally unique string\n  ///   elems, // a string or array representing actual data elements of the underlying sequence\n  ///   end_cap, // this is useful for dealing with replace operations\n  ///   sort_key, // version to pretend this is for the purposes of sorting\n  ///   deleted_by : {}, // if this node gets deleted, we'll mark it here\n  ///   nexts : [], // array of nodes following this one\n  ///   next : null // final node following this one (after all the nexts)\n  /// } \n  ///\n  /// var sequence_node = sequence_crdt.create_node('alice1', 'hello')\n  /// ```\n  sequence_crdt.create_node = (version, elems, end_cap, sort_key) => ({\n    version,\n    sort_key,\n    elems,\n    end_cap,\n    deleted_by: {},\n    nexts: [],\n    next: null,\n  });\n\n  /// # sequence_crdt.generate_braid(root_node, version, is_anc)\n  ///  \n  /// Reconstructs an array of splice-information which can be passed to `sequence_crdt.add_version` in order to add `version` to another `sequence_crdt` instance – the returned array looks like: `[[insert_pos, delete_count, insert_elems, sort_key, ...], ...]`. `is_anc` is a function which accepts a version string and returns `true` if and only if the given version is an ancestor of `version` (i.e. 
a version which the author of `version` knew about when they created that version).
  ///
  /// ``` js
  /// var root_node = sequence_crdt.create_node('root', '')
  /// sequence_crdt.add_version(root_node, 'alice1', [[0, 0, 'hello']])
  /// console.log(sequence_crdt.generate_braid(root_node, 'alice1', x => false)) // outputs [[0, 0, "hello", ...]]
  /// ```
  sequence_crdt.generate_braid = (S, version, is_anc, read_array_elements) => {
    if (!read_array_elements) read_array_elements = (x) => x;
    // Each splice: [offset, delete_count, insert_elems, sort_key, tag],
    // where tag is "i" (insert), "d" (delete), or "r" (replace).
    var splices = [];

    // Record an insertion, merging into the previous splice when it is
    // directly adjacent and merge rules allow it.
    function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
      if (typeof ins !== "string")
        ins = ins.map((x) => read_array_elements(x, () => false));
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (
          prev[0] + prev[1] === offset &&
          !end_cap &&
          (!is_row_header || prev[3] == sort_key) &&
          (prev[4] === "i" || (prev[4] === "r" && prev[1] === 0))
        ) {
          prev[2] = prev[2].concat(ins);
          return;
        }
      }
      splices.push([offset, 0, ins, sort_key, end_cap ? "r" : "i"]);
    }

    // Record a deletion, extending the previous splice when adjacent
    // (never merging into a pure insert).
    function add_del(offset, del, ins) {
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (prev[0] + prev[1] === offset && prev[4] !== "i") {
          prev[1] += del;
          return;
        }
      }
      splices.push([offset, del, ins, null, "d"]);
    }

    // Walk the node tree in document order, tracking the offset within the
    // sequence as seen by `version`'s ancestors.
    var offset = 0;
    function helper(node, _version, end_cap, is_row_header) {
      if (_version === version) {
        // This node was inserted by `version` itself.
        add_ins(
          offset,
          node.elems.slice(0),
          node.sort_key,
          end_cap,
          is_row_header
        );
      } else if (node.deleted_by[version] && node.elems.length > 0) {
        add_del(offset, node.elems.length, node.elems.slice(0, 0));
      }

      // Only nodes visible to the ancestor set advance the offset.
      if (
        (!_version || is_anc(_version)) &&
        !Object.keys(node.deleted_by).some(is_anc)
      ) {
        offset += node.elems.length;
      }

      node.nexts.forEach((next) =>
        helper(next, next.version, node.end_cap, true)
      );
      if (node.next) helper(node.next, _version);
    }
    helper(S, null);
    splices.forEach((s) => {
      // if we have replaces with 0 deletes,
      // make them have at least 1 delete..
      // this can happen when there are multiple replaces of the same text,
      // and our code above will associate those deletes with only one of them
      if (s[4] === "r" && s[1] === 0) s[1] = 1;
    });
    return splices;
  };

  /// # sequence_crdt.apply_bubbles(root_node, to_bubble)
  ///
  /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. 
We will use the \"bottom\" as the new name for the version, and we'll use the \"top\" as the new parents.\n  /// \n  /// ``` js\n  /// sequence_crdt.apply_bubbles(root_node, {\n  ///   alice4: ['bob5', 'alice4'],\n  ///   bob5: ['bob5', 'alice4']\n  /// })\n  /// ```\n  sequence_crdt.apply_bubbles = (S, to_bubble) => {\n    sequence_crdt.traverse(\n      S,\n      () => true,\n      (node) => {\n        if (\n          to_bubble[node.version] &&\n          to_bubble[node.version][0] != node.version\n        ) {\n          if (!node.sort_key) node.sort_key = node.version;\n          node.version = to_bubble[node.version][0];\n        }\n\n        for (var x of Object.keys(node.deleted_by)) {\n          if (to_bubble[x]) {\n            delete node.deleted_by[x];\n            node.deleted_by[to_bubble[x][0]] = true;\n          }\n        }\n      },\n      true\n    );\n\n    function set_nnnext(node, next) {\n      while (node.next) node = node.next;\n      node.next = next;\n    }\n\n    do_line(S, S.version);\n    function do_line(node, version) {\n      var prev = null;\n      while (node) {\n        if (node.nexts[0] && node.nexts[0].version == version) {\n          for (let i = 0; i < node.nexts.length; i++) {\n            delete node.nexts[i].version;\n            delete node.nexts[i].sort_key;\n            set_nnnext(\n              node.nexts[i],\n              i + 1 < node.nexts.length ? 
node.nexts[i + 1] : node.next\n            );\n          }\n          node.next = node.nexts[0];\n          node.nexts = [];\n        }\n\n        if (node.deleted_by[version]) {\n          node.elems = node.elems.slice(0, 0);\n          node.deleted_by = {};\n          if (prev) {\n            node = prev;\n            continue;\n          }\n        }\n\n        var next = node.next;\n\n        if (\n          !node.nexts.length &&\n          next &&\n          (!node.elems.length ||\n            !next.elems.length ||\n            (Object.keys(node.deleted_by).every((x) => next.deleted_by[x]) &&\n              Object.keys(next.deleted_by).every((x) => node.deleted_by[x])))\n        ) {\n          if (!node.elems.length) node.deleted_by = next.deleted_by;\n          node.elems = node.elems.concat(next.elems);\n          node.end_cap = next.end_cap;\n          node.nexts = next.nexts;\n          node.next = next.next;\n          continue;\n        }\n\n        if (next && !next.elems.length && !next.nexts.length) {\n          node.next = next.next;\n          continue;\n        }\n\n        for (let n of node.nexts) do_line(n, n.version);\n\n        prev = node;\n        node = next;\n      }\n    }\n  };\n\n  /// # sequence_crdt.get(root_node, i, is_anc)\n  /// \n  /// Returns the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.\n  /// \n  /// ``` js\n  /// var x = sequence_crdt.get(root_node, 2, {\n  ///     alice1: true\n  /// })\n  /// ```\n  sequence_crdt.get = (S, i, is_anc) => {\n    var ret = null;\n    var offset = 0;\n    sequence_crdt.traverse(S, is_anc ? 
is_anc : () => true, (node) => {\n      if (i - offset < node.elems.length) {\n        ret = node.elems[i - offset];\n        return false;\n      }\n      offset += node.elems.length;\n    });\n    return ret;\n  };\n\n  /// # sequence_crdt.update(root_node, i, v, is_anc)\n  /// \n  /// Sets the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node` to the value `v`, when only considering versions which result in `true` when passed to `is_anc`.\n  /// \n  /// ``` js\n  /// sequence_crdt.update(root_node, 2, 'x', {\n  ///   alice1: true\n  /// })\n  /// ```\n  sequence_crdt.update = (S, i, v, is_anc) => {\n    var offset = 0;\n    sequence_crdt.traverse(S, is_anc ? is_anc : () => true, (node) => {\n      if (i - offset < node.elems.length) {\n        if (typeof node.elems == \"string\")\n          node.elems =\n            node.elems.slice(0, i - offset) +\n            v +\n            node.elems.slice(i - offset + 1);\n        else node.elems[i - offset] = v;\n        return false;\n      }\n      offset += node.elems.length;\n    });\n  };\n\n  /// # sequence_crdt.length(root_node, is_anc)\n  /// \n  /// Returns the length of the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.\n  /// \n  /// ``` js\n  /// console.log(sequence_crdt.length(root_node, {\n  ///  alice1: true\n  /// }))\n  /// ```\n  sequence_crdt.length = (S, is_anc) => {\n    var count = 0;\n    sequence_crdt.traverse(S, is_anc ? is_anc : () => true, (node) => {\n      count += node.elems.length;\n    });\n    return count;\n  };\n\n  /// # sequence_crdt.break_node(node, break_position, end_cap, new_next)\n  /// \n  /// This method breaks apart a `sequence_crdt` node into two nodes, each representing a subsequence of the sequence represented by the original node. The `node` parameter is modified into the first node, and the second node is returned. 
The first node represents the elements of the sequence before `break_position`, and the second node represents the rest of the elements. If `end_cap` is truthy, then the first node will have `end_cap` set – this is generally done if the elements in the second node are being replaced. This method will add `new_next` to the first node's `nexts` array.\n  /// \n  /// ``` js\n  /// var node = sequence_crdt.create_node('alice1', 'hello') // node.elems == 'hello'\n  /// var second = sequence_crdt.break_node(node, 2) // now node.elems == 'he', and second.elems == 'llo'\n  /// ```\n  sequence_crdt.break_node = (node, x, end_cap, new_next) => {\n    var tail = sequence_crdt.create_node(\n      null,\n      node.elems.slice(x),\n      node.end_cap\n    );\n    Object.assign(tail.deleted_by, node.deleted_by);\n    tail.nexts = node.nexts;\n    tail.next = node.next;\n\n    node.elems = node.elems.slice(0, x);\n    node.end_cap = end_cap;\n    node.nexts = new_next ? [new_next] : [];\n    node.next = tail;\n\n    return tail;\n  };\n\n  /// # sequence_crdt.add_version(root_node, version, splices, [is_anc])\n  /// \n  /// This is the main method in sequence_crdt, used to modify the sequence. The modification must be given a unique `version` string, and the modification itself is represented as an array of `splices`, where each splice looks like this: `[position, num_elements_to_delete, elements_to_insert, optional_sort_key]`. \n  /// \n  /// Note that all positions are relative to the original sequence, before any splices have been applied. Positions are counted by only considering nodes with versions which result in `true` when passed to `is_anc`. 
(and are not `deleted_by` any versions which return `true` when passed to `is_anc`).\n  /// \n  /// ``` js\n  /// var node = sequence_crdt.create_node('alice1', 'hello') \n  /// sequence_crdt.add_version(node, 'alice2', [[5, 0, ' world']], v => v == 'alice1') \n  /// ```\n  sequence_crdt.add_version = (S, version, splices, is_anc) => {\n    var rebased_splices = [];\n\n    function add_to_nexts(nexts, to) {\n      var i = binarySearch(nexts, function (x) {\n        if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1;\n        if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1;\n        return 0;\n      });\n      nexts.splice(i, 0, to);\n    }\n\n    var si = 0;\n    var delete_up_to = 0;\n\n    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n      var s = splices[si];\n      if (!s) return;\n      var sort_key = s[3];\n\n      if (deleted) {\n        if (s[1] == 0 && s[0] == offset) {\n          if (node.elems.length == 0 && !node.end_cap && has_nexts) return;\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n          if (node.elems.length == 0 && !node.end_cap)\n            add_to_nexts(node.nexts, new_node);\n          else sequence_crdt.break_node(node, 0, undefined, new_node);\n          si++;\n        }\n\n        if (\n          delete_up_to <= offset &&\n          s[1] &&\n          s[2] &&\n          s[0] == offset &&\n          node.end_cap &&\n          !has_nexts &&\n          (node.next && node.next.elems.length) &&\n          !Object.keys(node.next.deleted_by).some((version) => f(version))\n        ) {\n          delete_up_to = s[0] + s[1];\n\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n          
add_to_nexts(node.nexts, new_node);\n        }\n\n        return;\n      }\n\n      if (s[1] == 0) {\n        var d = s[0] - (offset + node.elems.length);\n        if (d > 0) return;\n        if (d == 0 && !node.end_cap && has_nexts) return;\n        var new_node = sequence_crdt.create_node(version, s[2], null, sort_key);\n\n        fresh_nodes.add(new_node);\n\n        if (d == 0 && !node.end_cap) {\n          add_to_nexts(node.nexts, new_node);\n        } else {\n          sequence_crdt.break_node(node, s[0] - offset, undefined, new_node);\n        }\n        si++;\n        return;\n      }\n\n      if (delete_up_to <= offset) {\n        var d = s[0] - (offset + node.elems.length);\n\n        let add_at_end =\n          d == 0 &&\n          s[2] &&\n          node.end_cap &&\n          !has_nexts &&\n          (node.next && node.next.elems.length) &&\n          !Object.keys(node.next.deleted_by).some((version) => f(version));\n\n        if (d > 0 || (d == 0 && !add_at_end)) return;\n\n        delete_up_to = s[0] + s[1];\n\n        if (s[2]) {\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n          if (add_at_end) {\n            add_to_nexts(node.nexts, new_node);\n          } else {\n            sequence_crdt.break_node(node, s[0] - offset, true, new_node);\n          }\n          return;\n        } else {\n          if (s[0] == offset) {\n          } else {\n            sequence_crdt.break_node(node, s[0] - offset);\n            return;\n          }\n        }\n      }\n\n      if (delete_up_to > offset) {\n        if (delete_up_to <= offset + node.elems.length) {\n          if (delete_up_to < offset + node.elems.length) {\n            sequence_crdt.break_node(node, delete_up_to - offset);\n          }\n          si++;\n        }\n        node.deleted_by[version] = true;\n        return;\n      }\n    };\n\n    var f = 
is_anc || (() => true);\n    var offset = 0;\n    var rebase_offset = 0;\n    let fresh_nodes = new Set();\n    function traverse(node, prev, version) {\n      if (!version || f(version)) {\n        var has_nexts = node.nexts.find((next) => f(next.version));\n        var deleted = Object.keys(node.deleted_by).some((version) =>\n          f(version)\n        );\n        let rebase_deleted = Object.keys(node.deleted_by).length;\n        process_patch(node, offset, has_nexts, prev, version, deleted);\n\n        if (!deleted) offset += node.elems.length;\n        if (!rebase_deleted && Object.keys(node.deleted_by).length)\n          rebased_splices.push([rebase_offset, node.elems.length, \"\"]);\n      }\n      if (fresh_nodes.has(node))\n        rebased_splices.push([rebase_offset, 0, node.elems]);\n      if (!Object.keys(node.deleted_by).length)\n        rebase_offset += node.elems.length;\n\n      for (var next of node.nexts) traverse(next, null, next.version);\n      if (node.next) traverse(node.next, node, version);\n    }\n    traverse(S, null, S.version);\n\n    return rebased_splices;\n  };\n\n  /// # sequence_crdt.traverse(root_node, is_anc, callback, [view_deleted, tail_callback])\n  /// \n  /// Traverses the subset of nodes in the tree rooted at `root_node` whose versions return `true` when passed to `is_anc`. 
For each node, `callback` is called with these parameters: `node, offset, has_nexts, prev, version, deleted`, \n  /// \n  /// Where\n  /// - `node` is the current node being traversed\n  /// - `offset` says how many elements we have passed so far \n  /// - `has_nexts` is true if some of this node's `nexts` will be traversed according to `is_anc`\n  /// - `prev` is a pointer to the node whos `next` points to this one, or `null` if this is the root node\n  /// - `version` is the version of this node, or this node's `prev` if our version is `null`, or that node's `prev` if it is also `null`, etc\n  /// - `deleted` is true if this node is deleted according to `is_anc`\n  /// \n  /// Usually we skip deleted nodes when traversing, but we'll include them if `view_deleted` is `true`. \n  /// \n  /// `tail_callback` is an optional callback that will get called with a single parameter `node` after all of that node's children `nexts` and `next` have been traversed. \n  /// \n  /// ``` js\n  /// sequence_crdt.traverse(node, () => true, node =>\n  ///   process.stdout.write(node.elems)) \n  /// ```\n  sequence_crdt.traverse = (S, f, cb, view_deleted, tail_cb) => {\n    var offset = 0;\n    function helper(node, prev, version) {\n      var has_nexts = node.nexts.find((next) => f(next.version));\n      var deleted = Object.keys(node.deleted_by).some((version) => f(version));\n      if (view_deleted || !deleted) {\n        if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n          return true;\n        offset += node.elems.length;\n      }\n      for (var next of node.nexts)\n        if (f(next.version)) {\n          if (helper(next, null, next.version)) return true;\n        }\n      if (node.next) {\n        if (helper(node.next, node, version)) return true;\n      } else if (tail_cb) tail_cb(node);\n    }\n    helper(S, null, S.version);\n  };\n\n  // modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\n  function 
binarySearch(ar, compare_fn) {\n    var m = 0;\n    var n = ar.length - 1;\n    while (m <= n) {\n      var k = (n + m) >> 1;\n      var cmp = compare_fn(ar[k]);\n      if (cmp > 0) {\n        m = k + 1;\n      } else if (cmp < 0) {\n        n = k - 1;\n      } else {\n        return k;\n      }\n    }\n    return m;\n  }\n})();\n\nif (typeof module != \"undefined\")\n  module.exports = {\n    create_antimatter_crdt,\n    create_json_crdt,\n    sequence_crdt,\n  };\n"
  },
  {
    "path": "antimatter/doc.html",
    "content": "<head>\n<link rel=\"stylesheet\" href=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css\">\n</head>\n\n<script src=\"https://unpkg.com/marked@4.0.5\"></script>\n\n<script src=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/highlight.min.js\"></script>\n<script src=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.1.0/languages/javascript.min.js\"></script>\n\n<body></body>\n<script>\n\n;(async () => {\n    let x = await fetch(`https://braid-org.github.io/braidjs/antimatter/antimatter.js`)\n    x = await x.text()\n    let code = x\n\n    x = await fetch(`https://braid-org.github.io/braidjs/antimatter/readme.md`)\n    x = await x.text()\n    let md = x\n\n    let code_blocks = []\n    if (true) {\n        let agg = code_blocks = []\n        let prev_o = 0\n        code.replace(/^[\\t ]*(?:antimatter|self|json|sequence)\\.(?:.*?) = (?:.*?)=> \\(?\\{\\n|^[\\t ]*(?:\\} else )?if \\(type == (?:.*?)\\) \\{\\n/gm, (_0, o) => {\n            agg.push(code.slice(prev_o, o))\n            prev_o = o\n        })\n        agg.push(code.slice(prev_o))\n    }\n    code_blocks = code_blocks.filter(x => x)\n\n    let md_blocks = []\n    if (true) {\n        let agg = md_blocks = []\n        let prev_o = 0\n        md.replace(/^(?:# antimatter|# json|# sequence|## message)/gm, (_0, o) => {\n            agg.push(md.slice(prev_o, o))\n            prev_o = o\n        })\n        agg.push(md.slice(prev_o))\n    }\n    md_blocks = md_blocks.filter(x => x)\n\n    function make_md(s) {\n        let d = make_html(`<div style=\"background:hsl(${Math.random() * 360}, 100%, 100%);width:50%\"></div>`)\n        d.innerHTML = marked.parse(s)\n        return d\n    }\n\n    function make_code(s) {\n        let vv = hljs.highlight(s, {language: 'javascript'}).value\n        let d = make_html(`<pre style=\"margin:0px;background:hsl(${Math.random() * 360}, 100%, 100%);width:50%\">${vv}</pre>`)\n        return d\n    }\n\n    while (md_blocks.length) {\n 
       let left = md_blocks.shift()\n        let right = code_blocks.shift()\n\n        let d = make_html(`<div style=\"border-top:1px solid black;display:flex;align-items: start;\"></div>`)\n        d.append(make_md(left))\n        d.append(make_code(right))\n        document.body.append(d)\n    }\n})()\n\nfunction make_html(s) {\n    let d = document.createElement('div')\n    d.innerHTML = s\n    return d.firstChild\n}\n\n</script>\n"
  },
  {
    "path": "antimatter/package.json",
    "content": "{\n  \"name\": \"@braidjs/antimatter\",\n  \"version\": \"0.0.34\",\n  \"description\": \"antimatter: a pruning algorithm for CRDTs and other mergeables\",\n  \"main\": \"antimatter.js\",\n  \"scripts\": {\n    \"test\": \"node test.js\"\n  },\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org/antimatter\"\n}\n"
  },
  {
    "path": "antimatter/readme.md",
    "content": "# MOVED TO https://github.com/braid-org/antimatter\n\n--\n\n# antimatter: an algorithm that prunes CRDT/OT history\n\n[Antimatter](https://braid.org/antimatter) is the world's first peer-to-peer synchronization algorithm that can prune its history in a network where peers disconnect, reconnect, and merge offline edits.  Antimatter supports arbitrary simultaneous edits, from arbitrary peers, under arbitrary network delays and partitions, and guarantees full CRDT/OT consistency, while pruning unnecessary history within each partitioned subnet, and across subnets once they reconnect.  In steady state, it prunes down to zero overhead.  This lets you put synchronizing data structures in more parts of your software, without worrying about memory overhead.\n\nThis package implements an antimatter peer composed of three objects:\n\n```js\nvar {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter')\n```\n\n- *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.\n- *json_crdt*: created using `create_json_crdt`, this object is a pruneable JSON CRDT — \"JSON\" meaning it represents an arbitrary JSON datstructure, and \"CRDT\" and \"pruneable\" having the same meaning as for sequence_crdt below. 
The json_crdt makes recursive use of sequence_crdt structures to represent arbitrary JSON (for instance, a map is represented with a sequence_crdt structure for each value, where the first element in the sequence is the value).\n- *sequence_crdt*: methods to manipulate a pruneable sequence CRDT — \"sequence\" meaning it represents a javascript string or array, \"CRDT\" meaning this structure can be merged with other ones, and \"pruneable\" meaning that it supports an operation to remove meta-data when it is no longer needed (whereas CRDT's often keep track of this meta-data forever).\n\nThe Antimatter Algorithm was invented by Michael Toomim and Greg Little in the\n[Braid Project](https://braid.org) of [Invisible College](https://invisible.college/).\n\n[Click here to see more details, and the API side-by-side with the source code.](https://braid.org/antimatter)\n"
  },
  {
    "path": "antimatter/test.html",
    "content": "<body></body>\n<script>\n\nlet real_random = Math.random\n\nfunction print(...args) {\n    let d = document.createElement('div')\n    let angle = real_random() * 360\n    d.style.background = `hsl(${angle},100%,${args[0]?.startsWith?.('i = ') ? 85 : 95}%)`\n    d.style.border = `3px solid hsl(${angle},100%,85%)`\n    d.style.display = 'grid'\n    d.style['grid-template-columns'] = '1fr '.repeat(args.length)\n    for (let a of args) {\n        if (typeof a == 'string') {\n            let dd = document.createElement('div')\n            dd.textContent = a\n            d.append(dd)\n        } else {\n            let dd = document.createElement('pre')\n            dd.style.fontSize = '50%'\n            dd.textContent = JSON.stringify(a, null, '    ')\n            d.append(dd)\n        }\n    }\n    document.body.append(d)\n}\n\nconsole.log = print\n\n</script>\n<script src=\"antimatter.js\"></script>\n<script src=\"https://dglittle.github.io/cdn/random002.js\"></script>\n\n<script>\n\n;(async () => {\n\n    let best_seed = null\n    let best_n = Infinity\n    let last_n\n\n    for (let i = 0; i < 100; i++) {\n        let seed = 'BASE_' + i\n        let r = run_test(seed, false)\n        if (!r) {\n            console.log(`seed \"${seed}\" FAILED after ${last_n} steps`)\n            if (last_n < best_n) {\n                best_n = last_n\n                best_seed = seed\n            }\n        } else {\n            console.log(`seed \"${seed}\" ${r === true ? 
'passed' : 'cancelled'} after ${last_n} steps!`)\n        }\n\n        if (best_seed != null) {\n            console.log(`    (smallest failed seed: \"${best_seed}\", after ${best_n} steps)`)\n        }\n\n        await new Promise(done => setTimeout(done, 10))\n        document.body.scrollTop = document.body.scrollHeight\n    }\n    if (best_seed == null) console.log(`ALL PASSED!`)\n    document.body.scrollTop = document.body.scrollHeight\n\n    function run_test(seed, verbose) {\n        try {\n        Math.randomSeed(seed)\n\n        let num_peers = Math.floor(Math.random() * 5) + 1\n        let steps = Math.floor(Math.random() * 200)\n\n        last_n = 0\n\n        let peers = []\n        let conns = {}\n        let next_conn_id = 0\n\n        for (let i = 0; i < num_peers; i++) {\n            peers.push(create_antimatter_crdt(msg => {\n                let c = conns[msg.conn]\n\n                if (c?.[i]?.other == null) {\n                    debugger\n                }\n\n                if (verbose) console.log(`    send p${i}->p${c?.[i]?.other}(conn:${msg.conn}) msg:${msg.type}`)\n\n                c?.[c?.[i]?.other]?.q.push(msg)\n            }, () => 123, () => 1, () => {}, {id: i}))\n        }\n\n        peers[0].update({range: '', content: ''})\n\n        for (let i = 0; i < steps; i++) {\n            if (verbose) console.log(`i = ${i}`)\n\n            last_n++\n            if (last_n > best_n) return \"we've seen better\"\n\n            if (Math.random() < 1/3) {\n                // edit\n\n                let can_do = peers.filter(p => p.read() != null || p.id == 0)\n                let p = can_do[Math.floor(Math.random() * can_do.length)]\n                let text = p.read()\n\n                let start = Math.round(Math.random() * text.length)\n                let end = start + Math.round(Math.random() * (text.length - start))\n                let content = String.fromCharCode('a'.charCodeAt(0) + Math.floor(Math.random() * 
26)).repeat(Math.floor(Math.random() * 4))\n\n                if (verbose) console.log(`edit p${p.id} [${start}:${end}]=${content}`)\n\n                let v = p.update({range: `[${start}:${end}]`, content})\n            } else if (Math.random() < 0.5) {\n                if (Math.random() < 0.5) {\n                    // connect\n\n                    if (peers.length > 1) {\n                        let p1 = peers[Math.floor(Math.random() * peers.length)]\n                        let p2 = p1\n                        while (p2 == p1) p2 = peers[Math.floor(Math.random() * peers.length)]\n\n                        let conn = next_conn_id++\n                        conns[conn] = {\n                            [p1.id]: {other: p2.id, q: []},\n                            [p2.id]: {other: p1.id, q: []}\n                        }\n\n                        if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)\n\n                        p1.subscribe(conn)\n                    }\n                } else {\n                    // disconnect\n\n                    let conn_keys = Object.keys(conns)\n                    if (conn_keys.length) {\n                        let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]\n                        let c = conns[conn]\n                        let peer_keys = Object.keys(c)\n                        let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]\n                        let other = c[p.id].other\n\n                        if (peer_keys.length == 1) delete conns[conn]\n                        else delete c[p.id]\n\n                        if (verbose) console.log(`diss p${p.id} (conn:${conn}, ${other})`)\n\n                        if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)\n                    }\n                }\n            } else {\n                // message pump\n\n                let conn_keys = Object.keys(conns)\n                if 
(conn_keys.length) {\n                    let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]\n                    let c = conns[conn]\n                    let peer_keys = Object.keys(c)\n                    let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]\n\n                    let msg = c[p.id].q.shift()\n\n                    if (msg) {\n                        if (verbose) console.log(`recv p${p.id} (conn:${conn}) msg:${msg.type} :: ${JSON.stringify(msg)}`)\n\n                        p.receive(msg)\n                    }\n                }            \n            }\n\n            if (verbose) console.log(...peers.map(p => ({T: p.T, f: p.fissures})))\n        }\n\n        if (verbose) console.log(`----clean conns----`)\n        for (let [conn, c] of Object.entries(conns)) {\n            let peer_keys = Object.keys(c)\n            if (peer_keys.length < 2) {\n                let p = peers[peer_keys[0]]\n                let other = c[p.id].other\n\n                delete conns[conn]\n\n                if (verbose) console.log(`diss p${p.id} (conn:${conn})`)\n\n                if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)\n            }\n        }\n\n        if (verbose) console.log(`----conn all----`)\n        for (let i = 1; i < peers.length; i++) {\n            let p1 = peers[i]\n            let p2 = peers[Math.floor(Math.random() * i)]\n\n            let conn = next_conn_id++\n            conns[conn] = {\n                [p1.id]: {other: p2.id, q: []},\n                [p2.id]: {other: p1.id, q: []}\n            }\n\n            if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)\n\n            p1.subscribe(conn)\n        }\n\n        function pump_all() {\n            if (verbose) console.log(`----pump all----`)\n            for (let i = 20000; i >= 0; i--) {\n                if (i == 0) {\n                    console.log(`safety limit exceeded!`)\n                    throw 'bad'\n 
               }\n                if (verbose) console.log(`i = ${i}`)\n\n                last_n++\n\n                let options = []\n\n                for (let [conn, c] of Object.entries(conns)) {\n                    for (let [pk, pp] of Object.entries(c)) {\n                        if (pp.q.length) {\n                            options.push(() => {\n                                let p = peers[pk]\n                                let msg = pp.q.shift()\n\n                                if (verbose) {\n                                    console.log(`recv p${p.id} (conn:${conn}, ${conns[conn][p.id].other}) msg:${msg.type}, ${JSON.stringify(msg)}`)\n                                }\n\n                                p.receive(msg)\n\n                                if (verbose) {\n                                    console.log(...peers.map(p => ({T: p.T, f: p.fissures})))\n                                }\n                            })\n                        }\n                    }\n                }\n\n                if (options.length) {\n                    options[Math.floor(Math.random() * options.length)]()\n                } else break\n            }\n        }\n\n        pump_all()\n\n        if (verbose) console.log(`----resend fissures----`)\n        for (let p of peers) {\n            if (verbose) console.log(`p${p.id} sending fissures`)\n\n            for (let c of Object.keys(p.conns)) p.send({type: 'welcome', versions: [], fissures: Object.values(p.fissures), conn: c})\n        }\n\n        pump_all()\n\n        if (verbose) console.log(`----joiner----`)\n\n        peers[0].update({range: '[0:0]', content: '_'})\n\n        pump_all()\n\n        let final_text = peers[0].S\n        if (typeof final_text != 'string') {\n            console.log('final not a string: ', final_text)\n            return false\n        }\n        for (let p of peers) {\n            if (p.S != final_text) {\n                console.log(`peer not in line (we want 
${final_text}): `, p)\n                return false\n            }\n            if (Object.keys(p.T).length != 1) {\n                console.log('peer has big T: ', p)\n                return false\n            }\n            if (Object.keys(p.fissures).length != 0) {\n                console.log('peer has fissures: ', p)\n                return false\n            }\n        }\n\n        return true\n        } catch (e) {\n            console.log(`E: ${e}`, e.stack)\n            return false\n        }\n    }\n\n})()\n\n</script>\n"
  },
  {
    "path": "antimatter_ts/antimatter.js",
    "content": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// var {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') \n/// ```\n\n// v522\n\n/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.\nvar create_antimatter_crdt;\n\n/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable\n///   JSON CRDT — \"JSON\" meaning it represents an arbitrary JSON datstructure, and\n///   \"CRDT\" and \"pruneable\" having the same meaning as for sequence_crdt below. The\n///   json_crdt makes recursive use of sequence_crdt structures to represent\n///   arbitrary JSON (for instance, a map is represented with a sequence_crdt\n///   structure for each value, where the first element in the sequence is the\n///   value).\nvar create_json_crdt;\n\n/// - *sequence_crdt*: methods to manipulate a pruneable sequence CRDT —\n///   \"sequence\" meaning it represents a javascript string or array, \"CRDT\" meaning\n///   this structure can be merged with other ones, and \"pruneable\" meaning that it\n///   supports an operation to remove meta-data when it is no longer needed (whereas\n///   CRDT's often keep track of this meta-data forever).\nvar sequence_crdt = {};\n\n(() => {\n  /// # create_antimatter_crdt(send[, init])\n  ///\n  /// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).\n  ///\n  /// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a\n  ///   message over a connection registered with `get` or `connect`. 
The sole\n  ///   parameter to this function is a JSONafiable object that hopes to be passed to\n  ///   the `receive` method on the antimatter_crdt object at the other end of the\n  ///   connection specified in the `conn` key.\n  /// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)\n  /// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)\n  /// * `clear_timeout`: function that takes a timeout identifier an cancels it (e.g. wrapping the javascript clearTimeout)\n  /// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. \n  /// ``` js\n  /// var antimatter_crdt = create_antimatter_crdt(msg => {\n  ///     websockets[msg.conn].send(JSON.stringify(msg))\n  ///   },\n  ///   () => Date.now(),\n  ///   (func, t) => setTimeout(func, t),\n  ///   (t) => clearTimeout(t)),\n  ///.  
JSON.parse(fs.readFileSync('./antimatter.backup'))\n  /// )\n  /// ```\n  create_antimatter_crdt = (\n    send,\n    get_time,\n    set_timeout,\n    clear_timeout,\n    self\n  ) => {\n    self = create_json_crdt(self);\n    self.send = send;\n\n    self.id = self.id || Math.random().toString(36).slice(2);\n    self.next_seq = self.next_seq || 0;\n\n    self.conns = self.conns || {};\n    self.proto_conns = self.proto_conns || {};\n    self.conn_count = self.conn_count || 0;\n\n    self.fissures = self.fissures || {};\n    self.acked_boundary = self.acked_boundary || {};\n    self.marcos = self.marcos || {};\n    self.forget_cbs = self.forget_cbs || {};\n\n    self.version_groups = self.version_groups || {};\n\n    self.marco_map = self.marco_map || {};\n    self.marco_time_est_1 = self.marco_time_est_1 || 1000;\n    self.marco_time_est_2 = self.marco_time_est_2 || 1000;\n    self.marco_current_wait_time = self.marco_current_wait_time || 1000;\n    self.marco_increases_allowed = 1;\n    self.marco_timeout = self.marco_timeout || null;\n\n    function raw_add_version_group(version_array) {\n      let version_map = {};\n      for (let v of version_array) {\n        if (version_map[v]) continue;\n        version_map[v] = true;\n        if (self.version_groups[v]) self.version_groups[v].forEach((v) => (version_map[v] = true));\n      }\n      let version_group = Object.keys(version_map).sort();\n      version_group.forEach((v) => (self.version_groups[v] = version_group));\n      return version_group;\n    }\n\n    function get_parent_and_child_sets(children) {\n      let parent_sets = {};\n      let child_sets = {};\n      let done = {};\n      function add_set_to_sets(s, sets, mark_done) {\n        let container = { members: s };\n        let array = Object.keys(s);\n        if (array.length < 2) return;\n        for (let v of array) {\n          sets[v] = container;\n          if (mark_done) done[v] = true;\n        }\n      }\n      
add_set_to_sets(self.current_version, parent_sets, true);\n      for (let v of Object.keys(self.T)) {\n        if (done[v]) continue;\n        done[v] = true;\n        if (!children[v]) continue;\n        let first_child_set = children[v];\n        let first_child_array = Object.keys(first_child_set);\n        let first_parent_set = self.T[first_child_array[0]];\n        let first_parent_array = Object.keys(first_parent_set);\n        if (\n          first_child_array.every((child) => {\n            let parent_set = self.T[child];\n            let parent_array = Object.keys(parent_set);\n            return (\n              parent_array.length == first_parent_array.length &&\n              parent_array.every((parent) => first_parent_set[parent])\n            );\n          }) &&\n          first_parent_array.every((parent) => {\n            let child_set = children[parent];\n            let child_array = Object.keys(child_set);\n            return (\n              child_array.length == first_child_array.length &&\n              child_array.every((child) => first_child_set[child])\n            );\n          })\n        ) {\n          add_set_to_sets(first_parent_set, parent_sets, true);\n          add_set_to_sets(first_child_set, child_sets);\n        }\n      }\n      return { parent_sets, child_sets };\n    }\n\n    function find_one_bubble(bottom, children, child_sets, restricted) {\n      let expecting = { ...bottom };\n      let seen = {};\n      Object.keys(bottom).forEach(\n        (v) =>\n          children[v] &&\n          Object.keys(children[v]).forEach((v) => (seen[v] = true))\n      );\n      let q = Object.keys(expecting);\n      let last_top = null;\n      while (q.length) {\n        cur = q.shift();\n        if (!self.T[cur]) {\n          if (!restricted) throw \"bad\";\n          else return last_top;\n        }\n        if (restricted && restricted[cur]) return last_top;\n\n        if (seen[cur]) continue;\n\n        if (children[cur] && 
!Object.keys(children[cur]).every((c) => seen[c]))\n          continue;\n        seen[cur] = true;\n        delete expecting[cur];\n\n        if (!Object.keys(expecting).length) {\n          last_top = { [cur]: true };\n          if (!restricted) return last_top;\n        }\n\n        Object.keys(self.T[cur]).forEach((p) => {\n          expecting[p] = true;\n          q.push(p);\n        });\n\n        if (\n          child_sets[cur] &&\n          Object.keys(child_sets[cur].members).every((v) => seen[v])\n        ) {\n          let expecting_array = Object.keys(expecting);\n          let parent_set = self.T[cur];\n          let parent_array = Object.keys(parent_set);\n          if (\n            expecting_array.length == parent_array.length &&\n            expecting_array.every((v) => parent_set[v])\n          ) {\n            last_top = child_sets[cur].members;\n            if (!restricted) return last_top;\n          }\n        }\n      }\n      return last_top;\n    }\n\n    function add_version_group(version_array) {\n      let version_group = raw_add_version_group(version_array);\n      if (!version_array.some((x) => self.T[x])) return version_group[0];\n\n      let children = self.get_child_map();\n      let { parent_sets, child_sets } = get_parent_and_child_sets(children);\n\n      let to_bubble = {};\n      function mark_bubble(v, bubble) {\n        if (to_bubble[v]) return;\n        to_bubble[v] = bubble;\n        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);\n      }\n\n      let bottom = Object.fromEntries(\n        version_group.filter((x) => self.T[x]).map((x) => [x, true])\n      );\n      let top = find_one_bubble(bottom, children, child_sets);\n      let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];\n      for (let v of Object.keys(top)) to_bubble[v] = bubble;\n      for (let v of Object.keys(bottom)) mark_bubble(v, bubble);\n\n      self.apply_bubbles(to_bubble);\n      return version_group[0];\n    }\n\n    let 
orig_send = send;\n    send = (x) => {\n      if (self.version_groups[x.version])\n        x.version = self.version_groups[x.version];\n      if (x.parents) {\n        x.parents = { ...x.parents };\n        Object.keys(x.parents).forEach((v) =>\n          self.version_groups[v] && self.version_groups[v].forEach((v) => (x.parents[v] = true))\n        );\n      }\n      if (Array.isArray(x.versions)) {\n        x.versions = JSON.parse(JSON.stringify(x.versions));\n        x.versions.forEach(\n          (v) =>\n            self.version_groups[v.version] &&\n            (v.version = self.version_groups[v.version])\n        );\n        x.versions.forEach((v) => {\n          Object.keys(v.parents).forEach((vv) =>\n            self.version_groups[vv] && self.version_groups[vv].forEach((vv) => (v.parents[vv] = true))\n          );\n        });\n      }\n\n      orig_send(x);\n    };\n\n    /// # antimatter_crdt.receive(message)\n    ///\n    /// Let this antimatter object \"receive\" a message from another antimatter object, presumably from its `send` callback.\n    /// ``` js\n    /// websocket.on('message', data => {\n    ///     antimatter_crdt.receive(JSON.parse(data)) });\n    /// ```\n    /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `cmd` entry. 
Note that each object also\n    ///   contains a `conn` entry with the id of the connection the message is sent\n    ///   over.\n    self.receive = (x) => {\n      let {\n        cmd,\n        version,\n        parents,\n        patches,\n        versions,\n        fissure,\n        fissures,\n        seen,\n        forget,\n        marco,\n        peer,\n        conn,\n      } = x;\n\n      if (version && typeof version != \"string\") {\n        if (!self.T[version[0]]) version = add_version_group(version);\n        else version = version[0];\n      }\n      if (parents) {\n        parents = { ...parents };\n        Object.keys(parents).forEach((v) => {\n          if (self.version_groups[v] && self.version_groups[v][0] != v)\n            delete parents[v];\n        });\n      }\n\n      if (versions && versions.forEach) versions.forEach((v) => {\n        if (typeof v.version != \"string\") {\n          if (!self.T[v.version[0]]) v.version = add_version_group(v.version);\n          else v.version = v.version[0];\n        }\n        v.parents = { ...v.parents };\n        Object.keys(v.parents).forEach((vv) => {\n          if (self.version_groups[vv] && self.version_groups[vv][0] != vv)\n            delete v.parents[vv];\n        });\n      });\n\n      let marco_versions_array = version\n        ? [version]\n        : versions && !Array.isArray(versions)\n        ? 
Object.keys(versions).sort()\n        : null;\n      let marco_versions =\n        marco_versions_array &&\n        Object.fromEntries(marco_versions_array.map((v) => [v, true]));\n\n      if (versions && !Array.isArray(versions)) {\n        versions = { ...versions };\n        Object.keys(versions).forEach((v) => {\n          if (self.version_groups[v] && self.version_groups[v][0] != v)\n            delete versions[v];\n        });\n        if (!Object.keys(versions).length) return;\n      }\n\n      /// ## message `get`\n      /// `get` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.\n      /// ``` js\n      /// { cmd: 'get',\n      ///   peer: 'SENDER_ID',\n      ///   conn: 'CONN_ID',\n      ///   parents: {'PARENT_VERSION_ID': true, ...} }\n      /// ```\n      /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.\n      if (cmd == \"get\" || (cmd == \"welcome\" && peer != null)) {\n        if (self.conns[conn] != null) throw Error(\"bad\");\n        self.conns[conn] = { peer, seq: ++self.conn_count };\n      }\n\n      /// ## message `fissure`\n      ///\n      /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). 
It is also possible to send multiple `fissures` in an array.\n      /// ``` js\n      /// { cmd: 'fissure',\n      ///   fissure: { // or fissures: [{...}, {...}, ...],\n      ///     a: 'PEER_A_ID',\n      ///     b:  'PEER_B_ID',\n      ///     conn: 'CONN_ID',\n      ///     versions: {'VERSION_ID': true, ...},\n      ///     time: Date.now()\n      ///   },\n      ///   conn: 'CONN_ID' }\n      /// ```\n      /// Note that `time` isn't used for anything critical, as it's just wallclock time.\n      if (fissure) fissures = [fissure];\n\n      if (fissures) fissures.forEach((f) => (f.t = self.conn_count));\n\n      if (versions && (cmd == \"set\" || cmd == \"welcome\"))\n        versions = Object.fromEntries(versions.map((v) => [v.version, v]));\n      if (version) versions = { [version]: true };\n\n      let rebased_patches = [];\n\n      let fissures_back = [];\n      let fissures_forward = [];\n      let fissures_done = {};\n\n      function copy_fissures(fs) {\n        return fs.map((f) => {\n          f = JSON.parse(JSON.stringify(f));\n          delete f.t;\n          return f;\n        });\n      }\n\n      if (fissures) {\n        let fiss_map = Object.fromEntries(\n          fissures.map((f) => [f.a + \":\" + f.b + \":\" + f.conn, f])\n        );\n        for (let [key, f] of Object.entries(fiss_map)) {\n          if (fissures_done[f.conn]) continue;\n          fissures_done[f.conn] = true;\n\n          let our_f = self.fissures[key];\n          let other_key = f.b + \":\" + f.a + \":\" + f.conn;\n          let their_other = fiss_map[other_key];\n          let our_other = self.fissures[other_key];\n\n          if (!our_f) self.fissures[key] = f;\n          if (their_other && !our_other) self.fissures[other_key] = their_other;\n\n          if (!their_other && !our_other && f.b == self.id) {\n            if (self.conns[f.conn]) delete self.conns[f.conn];\n            our_other = self.fissures[other_key] = {\n              ...f,\n              a: f.b,\n     
         b: f.a,\n              t: self.conn_count,\n            };\n          }\n\n          if (!their_other && our_other) {\n            fissures_back.push(f);\n            fissures_back.push(our_other);\n          }\n\n          if (!our_f || (their_other && !our_other)) {\n            fissures_forward.push(f);\n            if (their_other || our_other)\n              fissures_forward.push(their_other || our_other);\n          }\n        }\n      }\n\n      /// ## message `welcome`\n      /// Sent in response to a `get`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `get` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.\n      ///\n      /// ``` js\n      /// {\n      ///   cmd: 'welcome',\n      ///   versions: [\n      ///     //each version looks like a set message...\n      ///   ],\n      ///   fissures: [\n      ///     //each fissure looks as it would in a fissure message...\n      ///   ],\n      ///   parents: \n      ///     {\n      ///       //versions you must have before consuming these new versions\n      ///       'PARENT_VERSION_ID': true,\n      ///       ...\n      ///     },\n      ///   [peer: 'SENDER_ID'], // if responding to a get\n      ///   conn: 'CONN_ID'\n      /// } \n      /// ```\n      let _T = {};\n      let added_versions = [];\n      if (cmd == \"welcome\") {\n        var versions_to_add = {};\n        let vs = Object.values(versions);\n        vs.forEach((v) => (versions_to_add[v.version] = v.parents));\n        vs.forEach((v) => {\n          if (\n            self.T[v.version] ||\n            (self.version_groups[v.version] &&\n              self.version_groups[v.version][0] != v.version)\n          ) 
{\n            remove_ancestors(v.version);\n            function remove_ancestors(v) {\n              if (versions_to_add[v]) {\n                Object.keys(versions_to_add[v]).forEach(remove_ancestors);\n                delete versions_to_add[v];\n              }\n            }\n          }\n        });\n\n        for (let v of vs) _T[v.version] = v.parents;\n\n        l1: for (var v of vs) {\n          if (versions_to_add[v.version]) {\n            let ps = Object.keys(v.parents);\n\n            if (!ps.length && Object.keys(self.T).length) continue;\n            for (p of ps) if (!self.T[p]) continue l1;\n\n            rebased_patches = rebased_patches.concat(\n              self.add_version(v.version, v.parents, v.patches, v.sort_keys)\n            );\n\n            added_versions.push(v);\n            delete _T[v.version];\n          }\n        }\n      }\n\n      if (cmd == \"get\" || (cmd == \"welcome\" && peer != null)) {\n        let fissures_back = Object.values(self.fissures);\n\n        if (cmd == \"welcome\") {\n          var leaves = { ..._T };\n          Object.keys(_T).forEach((v) => {\n            Object.keys(_T[v]).forEach((p) => delete leaves[p]);\n          });\n\n          let f = {\n            a: self.id,\n            b: peer,\n            conn: \"-\" + conn,\n            versions: Object.fromEntries(\n              added_versions\n                .concat(Object.keys(leaves).map((v) => versions[v]))\n                .map((v) => [v.version, true])\n            ),\n            time: get_time(),\n            t: self.conn_count,\n          };\n          if (Object.keys(f.versions).length) {\n            let key = f.a + \":\" + f.b + \":\" + f.conn;\n            self.fissures[key] = f;\n            fissures_back.push(f);\n            fissures_forward.push(f);\n          }\n        }\n\n        send({\n          cmd: \"welcome\",\n          versions: self.generate_braid(parents || versions),\n          fissures: copy_fissures(fissures_back),\n     
     parents:\n            parents &&\n            Object.keys(parents).length &&\n            self.get_leaves(self.ancestors(parents, true)),\n          ...(cmd == \"get\" ? { peer: self.id } : {}),\n          conn,\n        });\n      } else if (fissures_back.length) {\n        send({\n          cmd: \"fissure\",\n          fissures: copy_fissures(fissures_back),\n          conn,\n        });\n      }\n\n      /// ## message `forget`\n      /// Used to disconnect without creating a fissure, presumably meaning the sending peer doesn't plan to make any edits while they're disconnected.\n      /// ``` js\n      /// {cmd: 'forget', conn: 'CONN_ID'}\n      /// ```\n      if (cmd == \"forget\") {\n        if (self.conns[conn] == null) throw Error(\"bad\");\n        send({ cmd: \"ack\", forget: true, conn });\n\n        delete self.conns[conn];\n        delete self.proto_conns[conn];\n      }\n\n      /// ## message forget `ack` \n      /// Sent in response to `forget`.. so they know we forgot them.\n      /// ``` js\n      /// {cmd: 'ack', forget: true, conn: 'CONN_ID'}\n      /// ```\n      if (cmd == \"ack\" && forget) {\n        self.forget_cbs[conn]();\n      }\n\n      /// ## message `set`\n      /// Sent to alert peers about a change in the document. The change is represented as a version, with a unique id, a set of parent versions (the most recent versions known before adding this version), and an array of patches, where the offsets in the patches do not take into account the application of other patches in the same array.\n      /// ``` js\n      /// { cmd: 'set',\n      ///   version: 'VERSION_ID',\n      ///   parents: {'PARENT_VERSION_ID': true, ...},\n      ///   patches: [ {range: '.json.path.a.b', content: 42}, ... 
],\n      ///   conn: 'CONN_ID' }\n      /// ```\n      if (cmd == \"set\") {\n        if (conn == null || !self.T[version]) {\n          let ps = Object.keys(parents);\n\n          if (!ps.length && Object.keys(self.T).length) return;\n          for (p of ps) if (!self.T[p]) return;\n\n          rebased_patches = self.add_version(version, parents, patches);\n\n          for (let c of Object.keys(self.conns))\n            if (c != conn)\n              send({ cmd: \"set\", version, parents, patches, marco, conn: c });\n        }\n      }\n\n      /// ## message `marco`\n      /// Sent for pruning purposes, to try and establish whether everyone has seen the most recent versions. Note that a `set` message is treated as a `marco` message for the version being set.\n      /// ``` js\n      /// { cmd: 'marco',\n      ///   version: 'MARCO_ID',\n      ///   versions: {'VERSION_ID_A': true, ...},\n      ///   conn: 'CONN_ID' }\n      /// ```\n      if (cmd == \"marco\" || cmd == \"set\") {\n        if (!Object.keys(versions).every((v) => self.T[v])) return;\n\n        if (\n          self.marco_timeout &&\n          marco_versions_array.length ==\n            Object.keys(self.current_version).length &&\n          marco_versions_array.every((x) => self.current_version[x])\n        ) {\n          clear_timeout(self.marco_timeout);\n          self.marco_timeout = null;\n        }\n\n        let m = self.marcos[marco];\n        if (!m) {\n          m = self.marcos[marco] = {\n            id: marco,\n            origin: conn,\n            count: Object.keys(self.conns).length - (conn != null ? 
1 : 0),\n            versions: marco_versions,\n            seq: self.conn_count,\n            time: get_time(),\n          };\n          m.orig_count = m.count;\n          m.real_marco = cmd == \"marco\";\n          m.key = JSON.stringify(Object.keys(m.versions).sort());\n          self.marco_map[m.key] = self.marco_map[m.key] || {};\n          let before = Object.keys(self.marco_map[m.key]).length;\n          self.marco_map[m.key][m.id] = true;\n          let after = Object.keys(self.marco_map[m.key]).length;\n          if (before == 1 && after == 2 && self.marco_increases_allowed > 0) {\n            self.marco_current_wait_time *= 2;\n            self.marco_increases_allowed--;\n          }\n\n          if (cmd == \"marco\")\n            for (let c of Object.keys(self.conns))\n              if (c != conn)\n                send({\n                  cmd: \"marco\",\n                  marco,\n                  versions: marco_versions,\n                  conn: c,\n                });\n        } else if (m.seq < self.conns[conn].seq) {\n          send({\n            cmd: \"ack\",\n            seen: \"local\",\n            marco,\n            versions: marco_versions,\n            conn,\n          });\n          return;\n        } else m.count--;\n        check_marco_count(marco);\n      }\n\n      /// ## message local `ack`\n      /// Sent in response to `set`, but not right away; a peer will first send the `set` over all its other connections, and only after they have all responded with a local `ack` – and we didn't see a `fissure` message while waiting – will the peer send a local `ack` over the originating connection.\n      /// ``` js\n      /// {cmd: 'ack', seen: 'local', version: 'VERSION_ID', conn: 'CONN_ID'}\n      /// ```\n      if (cmd == \"ack\" && seen == \"local\") {\n        let m = self.marcos[marco];\n        if (!m || m.cancelled) return;\n        m.count--;\n        check_marco_count(marco);\n      }\n      function check_marco_count(marco) {\n     
   let m = self.marcos[marco];\n        if (m && m.count === 0 && !m.cancelled) {\n          m.time2 = get_time();\n          if (m.orig_count > 0) {\n            let t = m.time2 - m.time;\n            let weight = 0.1;\n            self.marco_time_est_1 =\n              weight * t + (1 - weight) * self.marco_time_est_1;\n          }\n          if (m.origin != null) {\n            if (self.conns[m.origin])\n              send({\n                cmd: \"ack\",\n                seen: \"local\",\n                marco,\n                versions: marco_versions,\n                conn: m.origin,\n              });\n          } else add_full_ack_leaves(marco);\n        }\n      }\n\n      /// ## message global `ack`\n      /// Sent after an originating peer has received a local `ack` over all its connections, or after any peer receives a global `ack`, so that everyone may come to know that this version has been seen by everyone in this peer group.\n      /// ``` js\n      /// {cmd: 'ack', seen: 'global', version: 'VERSION_ID', conn: 'CONN_ID'}\n      /// ```\n      if (cmd == \"ack\" && seen == \"global\") {\n        let m = self.marcos[marco];\n\n        if (!m || m.cancelled) return;\n\n        let t = get_time() - m.time2;\n        let weight = 0.1;\n        self.marco_time_est_2 =\n          weight * t + (1 - weight) * self.marco_time_est_2;\n\n        if (m.real_marco && Object.keys(self.marco_map[m.key]).length == 1) {\n          self.marco_current_wait_time *= 0.8;\n        }\n\n        add_full_ack_leaves(marco, conn);\n      }\n      function add_full_ack_leaves(marco, conn) {\n        let m = self.marcos[marco];\n        if (!m || m.cancelled) return;\n        m.cancelled = true;\n\n        for (let [c, cc] of Object.entries(self.conns))\n          if (c != conn && cc.seq <= m.seq)\n            send({\n              cmd: \"ack\",\n              seen: \"global\",\n              marco,\n              versions: marco_versions,\n              conn: c,\n            
});\n\n        for (let v of Object.keys(m.versions)) {\n          if (!self.T[v]) continue;\n          let marks = {};\n          let f = (v) => {\n            if (!marks[v]) {\n              marks[v] = true;\n              delete self.acked_boundary[v];\n              Object.keys(self.T[v]).forEach(f);\n            }\n          };\n          f(v);\n          self.acked_boundary[v] = true;\n        }\n        prune(false, m.seq);\n      }\n\n      if (added_versions.length || fissures_forward.length) {\n        for (let c of Object.keys(self.conns))\n          if (c != conn)\n            send({\n              cmd: added_versions.length ? \"welcome\" : \"fissure\",\n              ...(added_versions.length ? { versions: added_versions } : {}),\n              fissures: copy_fissures(fissures_forward),\n              conn: c,\n            });\n      }\n\n      if (fissures_forward.length) resolve_fissures();\n\n      if (\n        !self.marco_timeout &&\n        cmd != \"set\" &&\n        cmd != \"marco\" &&\n        prune(true)\n      ) {\n        if (!self.marco_current_wait_time) {\n          self.marco_current_wait_time =\n            4 * (self.marco_time_est_1 + self.marco_time_est_2);\n        }\n\n        let t = Math.random() * self.marco_current_wait_time;\n\n        self.marco_timeout = set_timeout(() => {\n          self.marco_increases_allowed = 1;\n          self.marco_timeout = null;\n          if (prune(true)) self.marco();\n        }, t);\n      }\n\n      if (cmd == \"welcome\" && peer == null && prune(true, null, true))\n        self.marco();\n\n      return rebased_patches;\n    };\n\n    /// # antimatter_crdt.get(conn) or connect(conn)\n    ///\n    /// Register a new connection with id `conn` – triggers this antimatter_crdt object to send a `get` message over the given connection. 
\n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.get('connection_to_bob')\n    /// ```\n    self.get = (conn) => {\n      self.proto_conns[conn] = true;\n      send({ cmd: \"get\", peer: self.id, conn });\n    };\n    self.connect = self.get;\n\n    /// # antimatter_crdt.forget(conn)\n    ///\n    /// Disconnect the given connection without creating a fissure – we don't need to reconnect with them.. it seems.. if we do, then we need to call `disconnect` instead, which will create a fissure allowing us to reconnect.\n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.forget('connection_to_bob')\n    /// ```\n    self.forget = async (conn) => {\n      await new Promise((done) => {\n        if (self.conns[conn] != null) {\n          self.forget_cbs[conn] = done;\n          send({ cmd: \"forget\", conn });\n        }\n        self.disconnect(conn, false);\n      });\n    };\n\n    /// # antimatter_crdt.disconnect(conn)\n    ///\n    /// If we detect that a connection has closed, let the antimatter_crdt object know by calling this method with the given connection id – this will create a fissure so we can reconnect with whoever was on the other end of the connection later on. \n    ///\n    /// ``` js\n    /// alice_antimatter_crdt.disconnect('connection_to_bob')\n    /// ```\n    self.disconnect = (conn, fissure = true) => {\n      if (self.conns[conn] == null && !self.proto_conns[conn]) return;\n      delete self.proto_conns[conn];\n\n      if (self.conns[conn]) {\n        let peer = self.conns[conn].peer;\n        delete self.conns[conn];\n\n        if (fissure) {\n          fissure = create_fissure(peer, conn);\n          if (fissure) self.receive({ cmd: \"fissure\", fissure });\n        }\n      }\n    };\n\n    /// # antimatter_crdt.set(...patches)\n    ///\n    /// Modify this antimatter_crdt object by applying the given patches. Each patch looks like `{range: '.life.meaning', content: 42}`. 
Calling this method will trigger calling the `send` callback to let our peers know about this change. \n    ///\n    /// ``` js\n    /// antimatter_crdt.set({\n    ///   range: '.life.meaning',\n    ///   content: 42\n    /// })\n    /// ```\n    self.set = (...patches) => {\n      var version = `${self.next_seq++}@${self.id}`;\n      self.receive({\n        cmd: \"set\",\n        version,\n        parents: { ...self.current_version },\n        patches,\n        marco: Math.random().toString(36).slice(2),\n      });\n      return version;\n    };\n\n    /// # antimatter_crdt.marco()\n    ///\n    /// Initiate sending a `marco` message to try and establish whether certain versions can be pruned. \n    ///\n    /// ``` js\n    /// antimatter_crdt.marco()\n    /// ```\n    self.marco = () => {\n      let versions = { ...self.current_version };\n      Object.keys(versions).forEach((v) =>\n        self.version_groups[v] && self.version_groups[v].forEach((v) => (versions[v] = true))\n      );\n\n      let marco = Math.random().toString(36).slice(2);\n      self.receive({ cmd: \"marco\", marco, versions });\n      return marco;\n    };\n\n    function cancel_marcos() {\n      for (let m of Object.values(self.marcos)) m.cancelled = true;\n    }\n\n    function create_fissure(peer, conn) {\n      let ack_versions = self.ancestors(self.acked_boundary);\n\n      let entries = Object.keys(self.T)\n        .filter((v) => !ack_versions[v] || self.acked_boundary[v])\n        .map((v) => [v, true]);\n      if (!entries.length) return;\n      let versions = Object.fromEntries(entries);\n      return { a: self.id, b: peer, conn, versions, time: get_time() };\n    }\n\n    function resolve_fissures() {\n      let unfissured = {};\n\n      Object.entries(self.fissures).forEach(([fk, f]) => {\n        var other_key = f.b + \":\" + f.a + \":\" + f.conn;\n        var other = self.fissures[other_key];\n        if (other) {\n          if (Object.keys(f.versions).length) {\n            for 
(let v of Object.keys(f.versions)) unfissured[v] = true;
            self.fissures[fk] = { ...f, versions: {} };
          }
          if (Object.keys(other.versions).length) {
            for (let v of Object.keys(other.versions)) unfissured[v] = true;
            self.fissures[other_key] = { ...other, versions: {} };
          }
        }
      });

      if (Object.keys(unfissured).length) {
        cancel_marcos();

        // Versions freed from fissures (and everything built on top of
        // them) are no longer fully acknowledged, so pull the acked
        // boundary back below them.
        let ack_versions = self.ancestors(self.acked_boundary);
        let unfissured_descendants = self.descendants(unfissured, true);
        for (let un of Object.keys(unfissured_descendants))
          if (ack_versions[un]) delete ack_versions[un];
        self.acked_boundary = self.get_leaves(ack_versions);
      }
    }

    // prune(just_checking, t, just_versions):
    //   Compacts history: cancels matched fissure pairs, expires
    //   fissures older than self.fissure_lifetime, and collapses runs
    //   of versions into "bubbles" via self.apply_bubbles. When
    //   `just_checking` is true, nothing is mutated — we only return
    //   true if a real prune would change something.
    function prune(just_checking, t, just_versions) {
      if (just_checking) t = Infinity;

      // In checking mode, work on a copy so the real fissure table is
      // left untouched.
      let fissures = just_checking ? { ...self.fissures } : self.fissures;

      // A fissure cancels against its mirror twin (keyed b:a:conn)
      // once both sides are present and old enough relative to `t`.
      Object.entries(fissures).forEach((x) => {
        var other_key = x[1].b + ":" + x[1].a + ":" + x[1].conn;
        var other = fissures[other_key];
        if (other && x[1].t <= t && other.t <= t) {
          delete fissures[x[0]];
          delete fissures[other_key];
        }
      });

      // Expire fissures that have outlived fissure_lifetime; a fissure
      // gets its timestamp lazily the first time we see it here.
      if (self.fissure_lifetime != null) {
        var now = get_time();
        Object.entries(fissures).forEach(([k, f]) => {
          if (f.time == null) f.time = now;
          if (f.time <= now - self.fissure_lifetime) {
            delete fissures[k];
          }
        });
      }

      if (
        just_checking &&
        !just_versions &&
        Object.keys(fissures).length < Object.keys(self.fissures).length
      )
        return true;

      // Versions referenced by surviving fissures — and, for a real
      // prune, versions not yet acknowledged by everyone — must not be
      // swallowed into a bubble.
      var restricted = {};

      Object.values(fissures).forEach((f) => {
        Object.keys(f.versions).forEach((v) => (restricted[v] = true));
      });

      if (!just_checking) {
        var acked = self.ancestors(self.acked_boundary);
        Object.keys(self.T).forEach((x) => {
          if (!acked[x]) restricted[x] = true;
        });
      }

      let children = self.get_child_map();
      let { parent_sets, child_sets } = get_parent_and_child_sets(children);

      // Maps each version to the bubble [bottom, top] that replaces it.
      let to_bubble = {};
      function mark_bubble(v, bubble) {
        if (to_bubble[v]) return;
        to_bubble[v] = bubble;
        for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);
      }
      let visited = {};
      // Walks the graph from the current leaves looking for bubbles;
      // in checking mode it returns true as soon as one is found.
      function f(cur) {
        if (!self.T[cur] || visited[cur]) return;
        visited[cur] = true;

        if (
          to_bubble[cur] == null &&
          parent_sets[cur] &&
          !parent_sets[cur].done
        ) {
          parent_sets[cur].done = true;
          let bottom = parent_sets[cur].members;
          let top = find_one_bubble(bottom, children, child_sets, restricted);
          if (top) {
            if (just_checking) return true;
            let bottom_array = Object.keys(bottom).sort();
            let top_array = Object.keys(top);
            raw_add_version_group(bottom_array);
            let bubble = [bottom_array[0], top_array[0]];
            for (let v of top_array) to_bubble[v] = bubble;
            for (let v of bottom_array) mark_bubble(v, bubble);
          }
        }
        if (to_bubble[cur] == null) {
          let top = find_one_bubble(
            { [cur]: true },
            children,
            child_sets,
            restricted
          );
          if (top && !top[cur]) {
            if (just_checking) return true;
            let bubble = [cur, Object.keys(top)[0]];
            for (let v of Object.keys(top)) to_bubble[v] = bubble;
            mark_bubble(bubble[0], bubble);
          } else {
            to_bubble[cur] = [cur, cur];
          }
        }
        return Object.keys(
          self.T[cur] || self.T[self.version_groups[cur][0]]
        ).some(f);
      }
      if (Object.keys(self.current_version).some(f) && just_checking)
        return true;

      self.apply_bubbles(to_bubble);

      // Drop marcos whose versions were pruned away entirely.
      for (let [k, m] of Object.entries(self.marcos)) {
        let vs = Object.keys(m.versions);
        if (
          !vs.length ||
          !vs.every((v) => self.T[v] || self.version_groups[v])
        ) {
          delete self.marcos[k];
          delete self.marco_map[m.key][m.id];
          if (!Object.keys(self.marco_map[m.key]).length)
            delete self.marco_map[m.key];
        }
      }

      // Drop version groups whose representative no longer exists.
      for (let [v, vs] of Object.entries(self.version_groups)) {
        if (!self.T[vs[0]]) delete self.version_groups[v];
      }
    }

    return self;
  };

  /// ## create_json_crdt([init])
  ///
  /// Create a new `json_crdt` object (or start with `init`, and add stuff to that). 
  ///
  /// ``` js
  /// var json_crdt = create_json_crdt()
  /// ``` 
  create_json_crdt = (self) => {
    self = self || {};
    self.S = self.S || null;
    self.T = self.T || {};
    self.root_version = null;
    self.current_version = self.current_version || {};
    self.version_cache = self.version_cache || {};

    let is_lit = (x) => !x || typeof x != "object" || x.t == "lit";
    let get_lit = (x) => (x && typeof x == "object" && x.t == "lit" ? x.S : x);
    let make_lit = (x) => (x && typeof x == "object" ? { t: "lit", S: x } : x);
    self = self || {}; // NOTE(review): redundant — `self` was already defaulted above

    /// # json_crdt.read()
    ///
    /// Returns an instance of the `json` object represented by this json_crdt data-structure. 
\n    ///\n    /// ``` js\n    /// console.log(json_crdt.read())\n    /// ```\n    self.read = (is_anc) => {\n      if (!is_anc) is_anc = () => true;\n\n      return raw_read(self.S, is_anc);\n    };\n\n    function raw_read(x, is_anc) {\n      if (x && typeof x == \"object\") {\n        if (x.t == \"lit\") return JSON.parse(JSON.stringify(x.S));\n        if (x.t == \"val\")\n          return raw_read(sequence_crdt.get(x.S, 0, is_anc), is_anc);\n        if (x.t == \"obj\") {\n          var o = {};\n          Object.entries(x.S).forEach(([k, v]) => {\n            var x = raw_read(v, is_anc);\n            if (x != null) o[k] = x;\n          });\n          return o;\n        }\n        if (x.t == \"arr\") {\n          var a = [];\n          sequence_crdt.traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted)\n                node.elems.forEach((e) => a.push(raw_read(e, is_anc)));\n            },\n            true\n          );\n          return a;\n        }\n        if (x.t == \"str\") {\n          var s = [];\n          sequence_crdt.traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted) s.push(node.elems);\n            },\n            true\n          );\n          return s.join(\"\");\n        }\n        throw Error(\"bad\");\n      }\n      return x;\n    }\n\n    /// # json_crdt.generate_braid(versions)\n    ///\n    /// Returns an array of `set` messages that each look like this: `{version, parents, patches, sort_keys}`, such that if we pass all these messages to `antimatter_crdt.receive()`, we'll reconstruct the data in this `json_crdt` data-structure, assuming the recipient already has the given `versions` (each version is represented as an object with a version, and each value is `true`).\n    ///\n    /// ``` js\n    /// json_crdt.generate_braid({\n    ///   alice2: true, \n    ///   bob3: true\n    /// })\n    
/// ```
    self.generate_braid = (versions) => {
      var anc =
        versions && Object.keys(versions).length
          ? self.ancestors(versions, true)
          : {};
      var is_anc = (x) => anc[x];

      if (Object.keys(self.T).length === 0) return [];

      // For each version the recipient lacks, serve the cached set
      // message if we have one, otherwise rebuild it from the CRDT
      // (and cache the result for next time).
      return Object.entries(self.version_cache)
        .filter((x) => !is_anc(x[0]))
        .map(([version, set_message]) => {
          return (self.version_cache[version] =
            set_message || generate_set_message(version));
        });

      function generate_set_message(version) {
        // A parentless version is a snapshot: one patch that replaces
        // the entire document with what that version alone reads.
        if (!Object.keys(self.T[version]).length) {
          return {
            version,
            parents: {},
            patches: [{ range: "", content: self.read((v) => v == version) }],
          };
        }

        var is_lit = (x) => !x || typeof x !== "object" || x.t === "lit";
        var get_lit = (x) =>
          x && typeof x === "object" && x.t === "lit" ? x.S : x;

        // View the tree as of this version's ancestors (excluding the
        // version itself), and collect the splices it introduced at
        // every val/arr/str node; `path` tracks our position.
        var ancs = self.ancestors({ [version]: true });
        delete ancs[version];
        var is_anc = (x) => ancs[x];
        var path = [];
        var patches = [];
        var sort_keys = {};
        recurse(self.S);
        function recurse(x) {
          if (is_lit(x)) {
          } else if (x.t === "val") {
            sequence_crdt
              .generate_braid(x.S, version, is_anc, raw_read)
              .forEach((s) => {
                if (s[2].length) {
                  patches.push({ range: path.join(""), content: s[2][0] });
                  if (s[3]) sort_keys[patches.length - 1] = s[3];
                }
              });
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach(recurse);
            });
          } else if (x.t === "arr") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
            var i = 0;
            sequence_crdt.traverse(x.S, is_anc, (node) => {
              node.elems.forEach((e) => {
                path.push(`[${i++}]`);
                recurse(e);
                path.pop();
              });
            });
          } else if (x.t === "obj") {
            Object.entries(x.S).forEach((e) => {
              path.push("[" + JSON.stringify(e[0]) + "]");
              recurse(e[1]);
              path.pop();
            });
          } else if (x.t === "str") {
            sequence_crdt.generate_braid(x.S, version, is_anc).forEach((s) => {
              patches.push({
                range: `${path.join("")}[${s[0]}:${s[0] + s[1]}]`,
                content: s[2],
              });
              if (s[3]) sort_keys[patches.length - 1] = s[3];
            });
          }
        }

        return {
          version,
          parents: { ...self.T[version] },
          patches,
          sort_keys,
        };
      }
    };

    /// # json_crdt.apply_bubbles(to_bubble)
    ///
    /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. 
We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
    ///
    /// ``` js 
    /// json_crdt.apply_bubbles({
    ///   alice4: ['bob5', 'alice4'], 
    ///   bob5: ['bob5', 'alice4']
    /// }) 
    /// ```
    self.apply_bubbles = (to_bubble) => {
      // First pass: push the renames down into every sequence node,
      // then collapse any subtree that can no longer change into a
      // plain literal.
      function recurse(x) {
        if (is_lit(x)) return x;
        if (x.t == "val") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.slice(0, 1).map(recurse);
            },
            true
          );
          // A register with one literal element and no branches left
          // collapses to that literal.
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.length == 1 &&
            is_lit(x.S.elems[0])
          )
            return x.S.elems[0];
          return x;
        }
        if (x.t == "arr") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          sequence_crdt.traverse(
            x.S,
            () => true,
            (node) => {
              node.elems = node.elems.map(recurse);
            },
            true
          );
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            x.S.elems.every(is_lit) &&
            !Object.keys(x.S.deleted_by).length
          )
            return { t: "lit", S: x.S.elems.map(get_lit) };
          return x;
        }
        if (x.t == "obj") {
          Object.entries(x.S).forEach((e) => {
            var y = (x.S[e[0]] = recurse(e[1]));
            if (y == null) delete x.S[e[0]];
          });
          if (Object.values(x.S).every(is_lit)) {
            var o = {};
            Object.entries(x.S).forEach((e) => (o[e[0]] = get_lit(e[1])));
            return { t: "lit", S: o };
          }
          return x;
        }
        if (x.t == "str") {
          sequence_crdt.apply_bubbles(x.S, to_bubble);
          if (
            x.S.nexts.length == 0 &&
            !x.S.next &&
            !Object.keys(x.S.deleted_by).length
          )
            return x.S.elems;
          return x;
        }
      }
      self.S = recurse(self.S);

      // Second pass: rewrite the version DAG — each bubbled version is
      // renamed to its bubble's bottom, which inherits the top's
      // parents.
      Object.entries(to_bubble).forEach(([version, bubble]) => {
        if (!self.T[version]) return;

        self.my_where_are_they_now[version] = bubble[0];

        if (version === bubble[1]) self.T[bubble[0]] = self.T[bubble[1]];

        if (version !== bubble[0]) {
          if (self.root_version == version) self.root_version = bubble[0];
          delete self.T[version];
          delete self.version_cache[version];
          delete self.acked_boundary[version];
          delete self.current_version[version];
          if (
            self.version_groups[version] &&
            self.version_groups[version][0] == version
          ) {
            for (let v of self.version_groups[version]) {
              delete self.version_groups[v];
            }
          }
          for (let [k, parents] of Object.entries(self.T)) {
            self.T[k] = parents = { ...parents };
            for (let p of Object.keys(parents)) {
              if (p == version) delete parents[p];
            }
          }
        } else self.version_cache[version] = null;
      });

      // Fully settled: a single leaf that everyone has acknowledged,
      // and no fissures, lets us flatten all history into one root
      // version and one literal.
      var leaves = Object.keys(self.current_version);
      var acked_boundary = Object.keys(self.acked_boundary);
      var fiss = Object.keys(self.fissures);
      if (
        leaves.length == 1 &&
        acked_boundary.length == 1 &&
        leaves[0] == acked_boundary[0] &&
        fiss.length == 0
      ) {
        self.T = { [leaves[0]]: {} };
        self.S = make_lit(self.read());
      }
    };

    /// # json_crdt.add_version(version, parents, patches[, sort_keys])
    ///
    /// The main method for modifying a `json_crdt` data structure. 
\n    ///\n    /// * `version`: Unique string associated with this edit. \n    /// * `parents`: A set of versions that this version is aware of, represented as a map with versions as keys, and values of `true`. \n    /// * `patches`: An array of patches, each patch looks like this `{range: '.life.meaning', content: 42}`. \n    /// * `sort_keys`: (optional) An object where each key is an index, and the value is a sort_key to use with the patch at the given index in the `patches` array – a sort_key overrides the version for a patch for the purposes of sorting. This can be useful after doing some pruning. \n    ///\n    /// ``` js\n    /// json_crdt.add_version(\n    ///   'alice6', \n    ///   {\n    ///     alice5: true, \n    ///     bob7: true\n    ///   }, \n    ///   [\n    ///     {\n    ///       range: '.a.b', \n    ///       content: 'c'\n    ///     }\n    ///   ]\n    /// )\n    /// ``` \n    self.add_version = (version, parents, patches, sort_keys) => {\n      if (self.T[version]) return;\n\n      if (self.root_version == null) self.root_version = version;\n\n      self.T[version] = { ...parents };\n\n      self.version_cache[version] = JSON.parse(\n        JSON.stringify({\n          version,\n          parents,\n          patches,\n          sort_keys,\n        })\n      );\n\n      Object.keys(parents).forEach((k) => {\n        if (self.current_version[k]) delete self.current_version[k];\n      });\n      self.current_version[version] = true;\n\n      if (!sort_keys) sort_keys = {};\n\n      if (!Object.keys(parents).length) {\n        var parse = self.parse_patch(patches[0]);\n        self.S = make_lit(parse.value);\n        return patches;\n      }\n\n      let is_anc;\n      if (parents == self.current_version) {\n        is_anc = (_version) => _version != version;\n      } else {\n        let ancs = self.ancestors(parents);\n        is_anc = (_version) => ancs[_version];\n      }\n\n      var rebased_patches = [];\n      patches.forEach((patch, i) 
=> {\n        var sort_key = sort_keys[i];\n        var parse = self.parse_patch(patch);\n        var cur = resolve_path(parse);\n        if (!parse.slice) {\n          if (cur.t != \"val\") throw Error(\"bad\");\n          var len = sequence_crdt.length(cur.S, is_anc);\n          sequence_crdt.add_version(\n            cur.S,\n            version,\n            [[0, len, [parse.delete ? null : make_lit(parse.value)], sort_key]],\n            is_anc\n          );\n          rebased_patches.push(patch);\n        } else {\n          if (typeof parse.value === \"string\" && cur.t !== \"str\")\n            throw Error(\n              `Cannot splice string ${JSON.stringify(\n                parse.value\n              )} into non-string`\n            );\n          if (parse.value instanceof Array && cur.t !== \"arr\")\n            throw Error(\n              `Cannot splice array ${JSON.stringify(\n                parse.value\n              )} into non-array`\n            );\n          if (parse.value instanceof Array)\n            parse.value = parse.value.map((x) => make_lit(x));\n\n          var r0 = parse.slice[0];\n          var r1 = parse.slice[1];\n          if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {\n            let len = sequence_crdt.length(cur.S, is_anc);\n            if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0;\n            if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1;\n          }\n\n          var rebased_splices = sequence_crdt.add_version(\n            cur.S,\n            version,\n            [[r0, r1 - r0, parse.value, sort_key]],\n            is_anc\n          );\n          for (let rebased_splice of rebased_splices)\n            rebased_patches.push({\n              range: `${parse.path\n                .map((x) => `[${JSON.stringify(x)}]`)\n                .join(\"\")}[${rebased_splice[0]}:${\n                rebased_splice[0] + rebased_splice[1]\n              }]`,\n              content: rebased_splice[2],\n            
});\n        }\n      });\n\n      function resolve_path(parse) {\n        var cur = self.S;\n        if (!cur || typeof cur != \"object\" || cur.t == \"lit\")\n          cur = self.S = {\n            t: \"val\",\n            S: sequence_crdt.create_node(self.root_version, [cur]),\n          };\n        var prev_S = null;\n        var prev_i = 0;\n        for (var i = 0; i < parse.path.length; i++) {\n          var key = parse.path[i];\n          if (cur.t == \"val\")\n            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);\n          if (cur.t == \"lit\") {\n            var new_cur = {};\n            if (cur.S instanceof Array) {\n              new_cur.t = \"arr\";\n              new_cur.S = sequence_crdt.create_node(\n                self.root_version,\n                cur.S.map((x) => make_lit(x))\n              );\n            } else {\n              if (typeof cur.S != \"object\") throw Error(\"bad\");\n              new_cur.t = \"obj\";\n              new_cur.S = {};\n              Object.entries(cur.S).forEach(\n                (e) => (new_cur.S[e[0]] = make_lit(e[1]))\n              );\n            }\n            cur = new_cur;\n            sequence_crdt.set(prev_S, prev_i, cur, is_anc);\n          }\n          if (cur.t == \"obj\") {\n            let x = cur.S[key];\n            if (!x || typeof x != \"object\" || x.t == \"lit\")\n              x = cur.S[key] = {\n                t: \"val\",\n                S: sequence_crdt.create_node(self.root_version, [\n                  x == null ? null : x,\n                ]),\n              };\n            cur = x;\n          } else if (i == parse.path.length - 1 && !parse.slice) {\n            parse.slice = [key, key + 1];\n            parse.value = cur.t == \"str\" ? 
parse.value : [parse.value];\n          } else if (cur.t == \"arr\") {\n            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = key), is_anc);\n          } else throw Error(\"bad\");\n        }\n        if (parse.slice) {\n          if (cur.t == \"val\")\n            cur = sequence_crdt.get((prev_S = cur.S), (prev_i = 0), is_anc);\n          if (typeof cur == \"string\") {\n            cur = {\n              t: \"str\",\n              S: sequence_crdt.create_node(self.root_version, cur),\n            };\n            sequence_crdt.set(prev_S, prev_i, cur, is_anc);\n          } else if (cur.t == \"lit\") {\n            if (!(cur.S instanceof Array)) throw Error(\"bad\");\n            cur = {\n              t: \"arr\",\n              S: sequence_crdt.create_node(\n                self.root_version,\n                cur.S.map((x) => make_lit(x))\n              ),\n            };\n            sequence_crdt.set(prev_S, prev_i, cur, is_anc);\n          }\n        }\n        return cur;\n      }\n\n      return rebased_patches;\n    };\n\n    /// # json_crdt.get_child_map()\n    ///\n    /// Returns a map where each key is a version, and each value is a set of child versions, represented as a map with versions as keys, and values of `true`.\n    ///\n    /// ``` js\n    /// json_crdt.get_child_map()\n    /// ``` \n    self.get_child_map = () => {\n      let children = {};\n      Object.entries(self.T).forEach(([v, parents]) => {\n        Object.keys(parents).forEach((parent) => {\n          if (!children[parent]) children[parent] = {};\n          children[parent][v] = true;\n        });\n      });\n      return children;\n    };\n\n    /// # json_crdt.ancestors(versions, ignore_nonexistent=false)\n    ///\n    /// Gather `versions` and all their ancestors into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. 
If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.ancestors({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.ancestors = (versions, ignore_nonexistent) => {\n      var result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(self.T[version]).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n\n    /// # json_crdt.descendants(versions, ignore_nonexistent=false)\n    ///\n    /// Gather `versions` and all their descendants into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.descendants({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.descendants = (versions, ignore_nonexistent) => {\n      let children = self.get_child_map();\n      var result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(children[version] || {}).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n\n    /// # json_crdt.get_leaves(versions)\n    ///\n    /// Returns a set of versions from `versions` which don't also have a child in `versions`. 
`versions` is itself a set of versions, represented as an object with version keys and `true` values, and the return value is represented the same way.\n    self.get_leaves = (versions) => {\n      var leaves = { ...versions };\n      Object.keys(versions).forEach((v) => {\n        Object.keys(self.T[v]).forEach((p) => delete leaves[p]);\n      });\n      return leaves;\n    };\n\n    /// # json_crdt.parse_patch(patch)\n    ///\n    /// Takes a patch in the form `{range, content}`, and returns an object of the form `{path: [...], [slice: [...]], [delete: true], content}`; basically calling `parse_json_path` on `patch.range`, and adding `patch.content` along for the ride.\n    self.parse_patch = (patch) => {\n      let x = self.parse_json_path(patch.range);\n      x.value = patch.content;\n      return x;\n    };\n\n    /// # json_crdt.parse_json_path(json_path)\n    ///\n    /// Parses the string `json_path` into an object like: `{path: [...], [slice: [...]], [delete: true]}`. \n    ///\n    /// * `a.b[3]` --> `{path: ['a', 'b', 3]}`\n    /// * `a.b[3:5]` --> `{path: ['a', 'b'], slice: [3, 5]}`\n    /// * `delete a.b` --> `{path: ['a', 'b'], delete: true}`\n    ///\n    /// ``` js\n    /// console.log(json_crdt.parse_json_path('a.b.c'))\n    /// ```\n    self.parse_json_path = (json_path) => {\n      var ret = { path: [] };\n      var re =\n        /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|\"(\\\\\"|[^\"])*\")\\]/g;\n      var m;\n      while ((m = re.exec(json_path))) {\n        if (m[1]) ret.delete = true;\n        else if (m[2]) ret.path.push(m[2]);\n        else if (m[3] && m[5])\n          ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))];\n        else if (m[3]) ret.path.push(JSON.parse(m[3]));\n      }\n      return ret;\n    };\n\n    return self;\n  };\n\n  /// # sequence_crdt.create_node(version, elems, [end_cap, sort_key])\n  ///\n  /// Creates a node for a `sequence_crdt` sequence CRDT with the given properties. 
The resulting node will look like this:
  ///
  /// ``` js
  /// {
  ///   version, // globally unique string
  ///   elems, // a string or array representing actual data elements of the underlying sequence
  ///   end_cap, // this is useful for dealing with replace operations
  ///   sort_key, // version to pretend this is for the purposes of sorting
  ///   deleted_by : {}, // if this node gets deleted, we'll mark it here
  ///   nexts : [], // array of nodes following this one
  ///   next : null // final node following this one (after all the nexts)
  /// } 
  ///
  /// var sequence_node = sequence_crdt.create_node('alice1', 'hello')
  /// ```
  sequence_crdt.create_node = (version, elems, end_cap, sort_key) => ({
    version,
    sort_key,
    elems,
    end_cap,
    deleted_by: {}, // starts empty; versions that delete this node are added later
    nexts: [], // versioned branches inserted after this node
    next: null, // continuation of the original sequence, after all the nexts
  });

  /// # sequence_crdt.generate_braid(root_node, version, is_anc)
  ///  
  /// Reconstructs an array of splice-information which can be passed to `sequence_crdt.add_version` in order to add `version` to another `sequence_crdt` instance – the returned array looks like: `[[insert_pos, delete_count, insert_elems, sort_key], ...]`. `is_anc` is a function which accepts a version string and returns `true` if and only if the given version is an ancestor of `version` (i.e. 
a version which the author of `version` knew about when they created that version).
  ///
  /// ``` js
  /// var root_node = sequence_crdt.create_node('alice1', 'hello')
  /// console.log(sequence_crdt.generate_braid(root_node, 'alice1', x => false)) // outputs [0, 0, "hello"]
  /// ```
  sequence_crdt.generate_braid = (S, version, is_anc, read_array_elements) => {
    if (!read_array_elements) read_array_elements = (x) => x;
    var splices = [];

    // Records an insertion at `offset`. Adjacent inserts merge into
    // the previous splice when they're contiguous, not separated by an
    // end_cap, and the previous splice is a pure insert (or a replace
    // that hasn't deleted anything yet).
    function add_ins(offset, ins, sort_key, end_cap, is_row_header) {
      if (typeof ins !== "string")
        ins = ins.map((x) => read_array_elements(x, () => false));
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (
          prev[0] + prev[1] === offset &&
          !end_cap &&
          (!is_row_header || prev[3] == sort_key) &&
          (prev[4] === "i" || (prev[4] === "r" && prev[1] === 0))
        ) {
          prev[2] = prev[2].concat(ins);
          return;
        }
      }
      splices.push([offset, 0, ins, sort_key, end_cap ? "r" : "i"]);
    }

    // Records a deletion; contiguous deletes merge into the previous
    // splice unless that splice is a pure insert.
    function add_del(offset, del, ins) {
      if (splices.length > 0) {
        var prev = splices[splices.length - 1];
        if (prev[0] + prev[1] === offset && prev[4] !== "i") {
          prev[1] += del;
          return;
        }
      }
      splices.push([offset, del, ins, null, "d"]);
    }

    // Walks the node tree; `offset` only counts elements visible to
    // is_anc that aren't deleted by a visible version.
    var offset = 0;
    function helper(node, _version, end_cap, is_row_header) {
      if (_version === version) {
        add_ins(
          offset,
          node.elems.slice(0),
          node.sort_key,
          end_cap,
          is_row_header
        );
      } else if (node.deleted_by[version] && node.elems.length > 0) {
        add_del(offset, node.elems.length, node.elems.slice(0, 0));
      }

      if (
        (!_version || is_anc(_version)) &&
        !Object.keys(node.deleted_by).some(is_anc)
      ) {
        offset += node.elems.length;
      }

      node.nexts.forEach((next) =>
        helper(next, next.version, node.end_cap, true)
      );
      if (node.next) helper(node.next, _version);
    }
    helper(S, null);
    splices.forEach((s) => {
      // if we have replaces with 0 deletes,
      // make them have at least 1 delete..
      // this can happen when there are multiple replaces of the same text,
      // and our code above will associate those deletes with only one of them
      if (s[4] === "r" && s[1] === 0) s[1] = 1;
    });
    return splices;
  };

  /// # sequence_crdt.apply_bubbles(root_node, to_bubble)
  ///
  /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are "bubbles", each bubble is represented with an array of two elements, the first element is the "bottom" of the bubble, and the second element is the "top" of the bubble. 
We will use the "bottom" as the new name for the version, and we'll use the "top" as the new parents.
  /// 
  /// ``` js
  /// sequence_crdt.apply_bubbles(root_node, {
  ///   alice4: ['bob5', 'alice4'],
  ///   bob5: ['bob5', 'alice4']
  /// })
  /// ```
  sequence_crdt.apply_bubbles = (S, to_bubble) => {
    // First, rename every node (and its deletion markers) to the
    // bottom of its bubble; the old name is kept as sort_key so that
    // ordering stays stable after the rename.
    sequence_crdt.traverse(
      S,
      () => true,
      (node) => {
        if (
          to_bubble[node.version] &&
          to_bubble[node.version][0] != node.version
        ) {
          if (!node.sort_key) node.sort_key = node.version;
          node.version = to_bubble[node.version][0];
        }

        for (var x of Object.keys(node.deleted_by)) {
          if (to_bubble[x]) {
            delete node.deleted_by[x];
            node.deleted_by[to_bubble[x][0]] = true;
          }
        }
      },
      true
    );

    // Appends `next` to the very end of node's `.next` chain.
    function set_nnnext(node, next) {
      while (node.next) node = node.next;
      node.next = next;
    }

    do_line(S, S.version);
    // Flattens one "line" of the tree: absorbs branches that now carry
    // this line's version, empties nodes deleted by it, and merges
    // adjacent nodes whose deletion sets agree.
    function do_line(node, version) {
      var prev = null;
      while (node) {
        if (node.nexts[0] && node.nexts[0].version == version) {
          // The branches belong to (what is now) the same version, so
          // splice them into the main chain.
          for (let i = 0; i < node.nexts.length; i++) {
            delete node.nexts[i].version;
            delete node.nexts[i].sort_key;
            set_nnnext(
              node.nexts[i],
              i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next
            );
          }
          node.next = node.nexts[0];
          node.nexts = [];
        }

        if (node.deleted_by[version]) {
          node.elems = node.elems.slice(0, 0);
          node.deleted_by = {};
          if (prev) {
            node = prev;
            continue;
          }
        }

        var next = node.next;

        if (
          !node.nexts.length &&
          next &&
          (!node.elems.length ||
            !next.elems.length ||
            (Object.keys(node.deleted_by).every((x) => next.deleted_by[x]) &&
              Object.keys(next.deleted_by).every((x) => node.deleted_by[x])))
        ) {
          // Merge `next` into `node` — safe when one of them is empty
          // or both carry identical deleted_by sets.
          if (!node.elems.length) node.deleted_by = next.deleted_by;
          node.elems = node.elems.concat(next.elems);
          node.end_cap = next.end_cap;
          node.nexts = next.nexts;
          node.next = next.next;
          continue;
        }

        // Drop empty trailing nodes with no branches.
        if (next && !next.elems.length && !next.nexts.length) {
          node.next = next.next;
          continue;
        }

        for (let n of node.nexts) do_line(n, n.version);

        prev = node;
        node = next;
      }
    }
  };

  /// # sequence_crdt.get(root_node, i, is_anc)
  /// 
  /// Returns the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// var x = sequence_crdt.get(root_node, 2, {
  ///     alice1: true
  /// })
  /// ```
  sequence_crdt.get = (S, i, is_anc) => {
    var ret = null;
    var offset = 0;
    sequence_crdt.traverse(S, is_anc ? is_anc : () => true, (node) => {
      // Found the node containing position i — grab the element and
      // bail out of the traversal.
      if (i - offset < node.elems.length) {
        ret = node.elems[i - offset];
        return false;
      }
      offset += node.elems.length;
    });
    return ret;
  };

  /// # sequence_crdt.set(root_node, i, v, is_anc)
  /// 
  /// Sets the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node` to the value `v`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// sequence_crdt.set(root_node, 2, 'x', {
  ///   alice1: true
  /// })
  /// ```
  sequence_crdt.set = (S, i, v, is_anc) => {
    var offset = 0;
    sequence_crdt.traverse(S, is_anc ? is_anc : () => true, (node) => {
      if (i - offset < node.elems.length) {
        // Strings are immutable, so rebuild; arrays are edited in place.
        if (typeof node.elems == "string")
          node.elems =
            node.elems.slice(0, i - offset) +
            v +
            node.elems.slice(i - offset + 1);
        else node.elems[i - offset] = v;
        return false;
      }
      offset += node.elems.length;
    });
  };

  /// # sequence_crdt.length(root_node, is_anc)
  /// 
  /// Returns the length of the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.
  /// 
  /// ``` js
  /// console.log(sequence_crdt.length(root_node, {
  ///  alice1: true
  /// }))
  /// ```
  sequence_crdt.length = (S, is_anc) => {
    var count = 0;
    sequence_crdt.traverse(S, is_anc ? is_anc : () => true, (node) => {
      count += node.elems.length;
    });
    return count;
  };

  /// # sequence_crdt.break_node(node, break_position, end_cap, new_next)
  /// 
  /// This method breaks apart a `sequence_crdt` node into two nodes, each representing a subsequence of the sequence represented by the original node. The `node` parameter is modified into the first node, and the second node is returned. 
The first node represents the elements of the sequence before `break_position`, and the second node represents the rest of the elements. If `end_cap` is truthy, then the first node will have `end_cap` set – this is generally done if the elements in the second node are being replaced. This method will add `new_next` to the first node's `nexts` array.\n  /// \n  /// ``` js\n  /// var node = sequence_crdt.create_node('alice1', 'hello') // node.elems == 'hello'\n  /// var second = sequence_crdt.break_node(node, 2) // now node.elems == 'he', and second.elems == 'llo'\n  /// ```\n  sequence_crdt.break_node = (node, x, end_cap, new_next) => {\n    var tail = sequence_crdt.create_node(\n      null,\n      node.elems.slice(x),\n      node.end_cap\n    );\n    Object.assign(tail.deleted_by, node.deleted_by);\n    tail.nexts = node.nexts;\n    tail.next = node.next;\n\n    node.elems = node.elems.slice(0, x);\n    node.end_cap = end_cap;\n    node.nexts = new_next ? [new_next] : [];\n    node.next = tail;\n\n    return tail;\n  };\n\n  /// # sequence_crdt.add_version(root_node, version, splices, [is_anc])\n  /// \n  /// This is the main method in sequence_crdt, used to modify the sequence. The modification must be given a unique `version` string, and the modification itself is represented as an array of `splices`, where each splice looks like this: `[position, num_elements_to_delete, elements_to_insert, optional_sort_key]`. \n  /// \n  /// Note that all positions are relative to the original sequence, before any splices have been applied. Positions are counted by only considering nodes with versions which result in `true` when passed to `is_anc`. 
(and are not `deleted_by` any versions which return `true` when passed to `is_anc`).\n  /// \n  /// ``` js\n  /// var node = sequence_crdt.create_node('alice1', 'hello') \n  /// sequence_crdt.add_version(node, 'alice2', [[5, 0, ' world']], null, v => v == 'alice1') \n  /// ```\n  sequence_crdt.add_version = (S, version, splices, is_anc) => {\n    var rebased_splices = [];\n\n    function add_to_nexts(nexts, to) {\n      var i = binarySearch(nexts, function (x) {\n        if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1;\n        if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1;\n        return 0;\n      });\n      nexts.splice(i, 0, to);\n    }\n\n    var si = 0;\n    var delete_up_to = 0;\n\n    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n      var s = splices[si];\n      if (!s) return;\n      var sort_key = s[3];\n\n      if (deleted) {\n        if (s[1] == 0 && s[0] == offset) {\n          if (node.elems.length == 0 && !node.end_cap && has_nexts) return;\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n          if (node.elems.length == 0 && !node.end_cap)\n            add_to_nexts(node.nexts, new_node);\n          else sequence_crdt.break_node(node, 0, undefined, new_node);\n          si++;\n        }\n\n        if (\n          delete_up_to <= offset &&\n          s[1] &&\n          s[2] &&\n          s[0] == offset &&\n          node.end_cap &&\n          !has_nexts &&\n          (node.next && node.next.elems.length) &&\n          !Object.keys(node.next.deleted_by).some((version) => f(version))\n        ) {\n          delete_up_to = s[0] + s[1];\n\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n         
 add_to_nexts(node.nexts, new_node);\n        }\n\n        return;\n      }\n\n      if (s[1] == 0) {\n        var d = s[0] - (offset + node.elems.length);\n        if (d > 0) return;\n        if (d == 0 && !node.end_cap && has_nexts) return;\n        var new_node = sequence_crdt.create_node(version, s[2], null, sort_key);\n\n        fresh_nodes.add(new_node);\n\n        if (d == 0 && !node.end_cap) {\n          add_to_nexts(node.nexts, new_node);\n        } else {\n          sequence_crdt.break_node(node, s[0] - offset, undefined, new_node);\n        }\n        si++;\n        return;\n      }\n\n      if (delete_up_to <= offset) {\n        var d = s[0] - (offset + node.elems.length);\n\n        let add_at_end =\n          d == 0 &&\n          s[2] &&\n          node.end_cap &&\n          !has_nexts &&\n          (node.next && node.next.elems.length) &&\n          !Object.keys(node.next.deleted_by).some((version) => f(version));\n\n        if (d > 0 || (d == 0 && !add_at_end)) return;\n\n        delete_up_to = s[0] + s[1];\n\n        if (s[2]) {\n          var new_node = sequence_crdt.create_node(\n            version,\n            s[2],\n            null,\n            sort_key\n          );\n\n          fresh_nodes.add(new_node);\n\n          if (add_at_end) {\n            add_to_nexts(node.nexts, new_node);\n          } else {\n            sequence_crdt.break_node(node, s[0] - offset, true, new_node);\n          }\n          return;\n        } else {\n          if (s[0] == offset) {\n          } else {\n            sequence_crdt.break_node(node, s[0] - offset);\n            return;\n          }\n        }\n      }\n\n      if (delete_up_to > offset) {\n        if (delete_up_to <= offset + node.elems.length) {\n          if (delete_up_to < offset + node.elems.length) {\n            sequence_crdt.break_node(node, delete_up_to - offset);\n          }\n          si++;\n        }\n        node.deleted_by[version] = true;\n        return;\n      }\n    };\n\n    var f 
= is_anc || (() => true);\n    var offset = 0;\n    var rebase_offset = 0;\n    let fresh_nodes = new Set();\n    function traverse(node, prev, version) {\n      if (!version || f(version)) {\n        var has_nexts = node.nexts.find((next) => f(next.version));\n        var deleted = Object.keys(node.deleted_by).some((version) =>\n          f(version)\n        );\n        let rebase_deleted = Object.keys(node.deleted_by).length;\n        process_patch(node, offset, has_nexts, prev, version, deleted);\n\n        if (!deleted) offset += node.elems.length;\n        if (!rebase_deleted && Object.keys(node.deleted_by).length)\n          rebased_splices.push([rebase_offset, node.elems.length, \"\"]);\n      }\n      if (fresh_nodes.has(node))\n        rebased_splices.push([rebase_offset, 0, node.elems]);\n      if (!Object.keys(node.deleted_by).length)\n        rebase_offset += node.elems.length;\n\n      for (var next of node.nexts) traverse(next, null, next.version);\n      if (node.next) traverse(node.next, node, version);\n    }\n    traverse(S, null, S.version);\n\n    return rebased_splices;\n  };\n\n  /// # sequence_crdt.traverse(root_node, is_anc, callback, [view_deleted, tail_callback])\n  /// \n  /// Traverses the subset of nodes in the tree rooted at `root_node` whose versions return `true` when passed to `is_anc`. 
For each node, `callback` is called with these parameters: `node, offset, has_nexts, prev, version, deleted`, \n  /// \n  /// Where\n  /// - `node` is the current node being traversed\n  /// - `offset` says how many elements we have passed so far \n  /// - `has_nexts` is true if some of this node's `nexts` will be traversed according to `is_anc`\n  /// - `prev` is a pointer to the node whos `next` points to this one, or `null` if this is the root node\n  /// - `version` is the version of this node, or this node's `prev` if our version is `null`, or that node's `prev` if it is also `null`, etc\n  /// - `deleted` is true if this node is deleted according to `is_anc`\n  /// \n  /// Usually we skip deleted nodes when traversing, but we'll include them if `view_deleted` is `true`. \n  /// \n  /// `tail_callback` is an optional callback that will get called with a single parameter `node` after all of that node's children `nexts` and `next` have been traversed. \n  /// \n  /// ``` js\n  /// sequence_crdt.traverse(node, () => true, node =>\n  ///   process.stdout.write(node.elems)) \n  /// ```\n  sequence_crdt.traverse = (S, f, cb, view_deleted, tail_cb) => {\n    var offset = 0;\n    function helper(node, prev, version) {\n      var has_nexts = node.nexts.find((next) => f(next.version));\n      var deleted = Object.keys(node.deleted_by).some((version) => f(version));\n      if (view_deleted || !deleted) {\n        if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n          return true;\n        offset += node.elems.length;\n      }\n      for (var next of node.nexts)\n        if (f(next.version)) {\n          if (helper(next, null, next.version)) return true;\n        }\n      if (node.next) {\n        if (helper(node.next, node, version)) return true;\n      } else if (tail_cb) tail_cb(node);\n    }\n    helper(S, null, S.version);\n  };\n\n  // modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\n  function 
binarySearch(ar, compare_fn) {\n    var m = 0;\n    var n = ar.length - 1;\n    while (m <= n) {\n      var k = (n + m) >> 1;\n      var cmp = compare_fn(ar[k]);\n      if (cmp > 0) {\n        m = k + 1;\n      } else if (cmp < 0) {\n        n = k - 1;\n      } else {\n        return k;\n      }\n    }\n    return m;\n  }\n})();\n\nif (typeof module != \"undefined\")\n  module.exports = {\n    create_antimatter_crdt,\n    create_json_crdt,\n    sequence_crdt,\n  };\n"
  },
  {
    "path": "antimatter_ts/doc.html",
    "content": "<head>\n<link rel=\"stylesheet\" href=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/styles/default.min.css\">\n</head>\n\n<script src=\"https://unpkg.com/marked@4.0.5\"></script>\n\n<script src=\"https://unpkg.com/@highlightjs/cdn-assets@11.1.0/highlight.min.js\"></script>\n<script src=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.1.0/languages/javascript.min.js\"></script>\n\n<body></body>\n<script>\n\n;(async () => {\n    let x = await fetch(`https://braid-org.github.io/braidjs/antimatter/antimatter.js`)\n    x = await x.text()\n    let code = x\n\n    x = await fetch(`https://braid-org.github.io/braidjs/antimatter/readme.md`)\n    x = await x.text()\n    let md = x\n\n    let code_blocks = []\n    if (true) {\n        let agg = code_blocks = []\n        let prev_o = 0\n        code.replace(/^[\\t ]*(?:antimatter|self|json|sequence)\\.(?:.*?) = (?:.*?)=> \\(?\\{\\n|^[\\t ]*(?:\\} else )?if \\(cmd == (?:.*?)\\) \\{\\n/gm, (_0, o) => {\n            agg.push(code.slice(prev_o, o))\n            prev_o = o\n        })\n        agg.push(code.slice(prev_o))\n    }\n    code_blocks = code_blocks.filter(x => x)\n\n    let md_blocks = []\n    if (true) {\n        let agg = md_blocks = []\n        let prev_o = 0\n        md.replace(/^(?:# antimatter|# json|# sequence|## message)/gm, (_0, o) => {\n            agg.push(md.slice(prev_o, o))\n            prev_o = o\n        })\n        agg.push(md.slice(prev_o))\n    }\n    md_blocks = md_blocks.filter(x => x)\n\n    function make_md(s) {\n        let d = make_html(`<div style=\"background:hsl(${Math.random() * 360}, 100%, 100%);width:50%\"></div>`)\n        d.innerHTML = marked.parse(s)\n        return d\n    }\n\n    function make_code(s) {\n        let vv = hljs.highlight(s, {language: 'javascript'}).value\n        let d = make_html(`<pre style=\"margin:0px;background:hsl(${Math.random() * 360}, 100%, 100%);width:50%\">${vv}</pre>`)\n        return d\n    }\n\n    while (md_blocks.length) {\n  
      let left = md_blocks.shift()\n        let right = code_blocks.shift()\n\n        let d = make_html(`<div style=\"border-top:1px solid black;display:flex;align-items: start;\"></div>`)\n        d.append(make_md(left))\n        d.append(make_code(right))\n        document.body.append(d)\n    }\n})()\n\nfunction make_html(s) {\n    let d = document.createElement('div')\n    d.innerHTML = s\n    return d.firstChild\n}\n\n</script>\n"
  },
  {
    "path": "antimatter_ts/package.json",
    "content": "{\n  \"name\": \"@braidjs/antimatter\",\n  \"version\": \"0.0.23\",\n  \"description\": \"antimatter: a pruning algorithm for CRDTs and other mergeables\",\n  \"main\": \"antimatter.js\",\n  \"scripts\": {\n    \"test\": \"node test.js\"\n  },\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org/antimatter\",\n  \"packageManager\": \"pnpm@9.0.4+sha256.caa915eaae9d9aefccf50ee8aeda25a2f8684d8f9d5c6e367eaf176d97c1f89e\",\n  \"dependencies\": {\n    \"typescript\": \"^5.6.2\"\n  }\n}\n"
  },
  {
    "path": "antimatter_ts/random002.js",
    "content": "\n// the next two functions added by me\n\nfunction create_rand(seed) {\n  if (typeof(seed) == 'string') {\n    var t = new MersenneTwister(0)\n    var a = []\n    for (var i = 0; i < seed.length; i++)\n      a[i] = seed.charCodeAt(i)\n    t.init_by_array(a, a.length)\n  } else if (typeof(seed) == 'number') {\n    var t = new MersenneTwister(seed)\n  } else {\n    var t = new MersenneTwister()\n  }\n  return () => t.random()\n}\n\nMath.randomSeed = function (seed) {\n  var r = create_rand(seed)\n  Math.random = () => r()\n}\n\n/* The following piece of code is an implementation of MersenneTwister object\n   taken from https://gist.github.com/banksean/300494, with one method \n   xor_array(array, size) added.\n*/\n\n/*\n  I've wrapped Makoto Matsumoto and Takuji Nishimura's code in a namespace\n  so it's better encapsulated. Now you can have multiple random number generators\n  and they won't stomp all over eachother's state.\n  \n  If you want to use this as a substitute for Math.random(), use the random()\n  method like so:\n  \n  var m = new MersenneTwister();\n  var randomNumber = m.random();\n  \n  You can also call the other genrand_{foo}() methods on the instance.\n\n  If you want to use a specific seed in order to get a repeatable random\n  sequence, pass an integer into the constructor:\n\n  var m = new MersenneTwister(123);\n\n  and that will always produce the same random sequence.\n\n  Sean McCullough (banksean@gmail.com)\n*/\n\n/* \n   A C-program for MT19937, with initialization improved 2002/1/26.\n   Coded by Takuji Nishimura and Makoto Matsumoto.\n \n   Before using, initialize the state by using init_genrand(seed)  \n   or init_by_array(init_key, key_length).\n \n   Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n   All rights reserved.                          
\n \n   Redistribution and use in source and binary forms, with or without\n   modification, are permitted provided that the following conditions\n   are met:\n \n     1. Redistributions of source code must retain the above copyright\n        notice, this list of conditions and the following disclaimer.\n \n     2. Redistributions in binary form must reproduce the above copyright\n        notice, this list of conditions and the following disclaimer in the\n        documentation and/or other materials provided with the distribution.\n \n     3. The names of its contributors may not be used to endorse or promote \n        products derived from this software without specific prior written \n        permission.\n \n   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n   \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n   A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n \n \n   Any feedback is very welcome.\n   http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n   email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nvar MersenneTwister = function(seed) {\n  if (seed == undefined) {\n    seed = new Date().getTime();\n  } \n  /* Period parameters */  \n  this.N = 624;\n  this.M = 397;\n  this.MATRIX_A = 0x9908b0df;   /* constant vector a */\n  this.UPPER_MASK = 0x80000000; /* most significant w-r bits */\n  
this.LOWER_MASK = 0x7fffffff; /* least significant r bits */\n \n  this.mt = new Array(this.N); /* the array for the state vector */\n  this.mti=this.N+1; /* mti==N+1 means mt[N] is not initialized */\n\n  this.init_genrand(seed);\n}  \n \n/* initializes mt[N] with a seed */\nMersenneTwister.prototype.init_genrand = function(s) {\n  this.mt[0] = s >>> 0;\n  for (this.mti=1; this.mti<this.N; this.mti++) {\n      var s = this.mt[this.mti-1] ^ (this.mt[this.mti-1] >>> 30);\n   this.mt[this.mti] = (((((s & 0xffff0000) >>> 16) * 1812433253) << 16) + (s & 0x0000ffff) * 1812433253)\n  + this.mti;\n      /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n      /* In the previous versions, MSBs of the seed affect   */\n      /* only MSBs of the array mt[].                        */\n      /* 2002/01/09 modified by Makoto Matsumoto             */\n      this.mt[this.mti] >>>= 0;\n      /* for >32 bit machines */\n  }\n}\n \n/* initialize by an array with array-length */\n/* init_key is the array for initializing keys */\n/* key_length is its length */\n/* slight change for C++, 2004/2/26 */\nMersenneTwister.prototype.init_by_array = function(init_key, key_length) {\n  var i, j, k;\n  this.init_genrand(19650218);\n  i=1; j=0;\n  k = (this.N>key_length ? 
this.N : key_length);\n  for (; k; k--) {\n    var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30)\n    this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1664525) << 16) + ((s & 0x0000ffff) * 1664525)))\n      + init_key[j] + j; /* non linear */\n    this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */\n    i++; j++;\n    if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }\n    if (j>=key_length) j=0;\n  }\n  for (k=this.N-1; k; k--) {\n    var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30);\n    this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1566083941) << 16) + (s & 0x0000ffff) * 1566083941))\n      - i; /* non linear */\n    this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */\n    i++;\n    if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }\n  }\n\n  this.mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */ \n}\n \n/* XORs the mt array with a given array xor_key of length key_length */\nMersenneTwister.prototype.xor_array = function(xor_key, key_length) {\n  var i, j;\n  j = 0;\n  for (i = 0; i < this.N; i++) {\n    this.mt[i] ^= xor_key[j];\n    this.mt[i] >>>= 0;\n    j++;\n    if (j >= key_length) j = 0;\n  }\n}\n\n/* generates a random number on [0,0xffffffff]-interval */\nMersenneTwister.prototype.genrand_int32 = function() {\n  var y;\n  var mag01 = new Array(0x0, this.MATRIX_A);\n  /* mag01[x] = x * MATRIX_A  for x=0,1 */\n\n  if (this.mti >= this.N) { /* generate N words at one time */\n    var kk;\n\n    if (this.mti == this.N+1)   /* if init_genrand() has not been called, */\n      this.init_genrand(5489); /* a default initial seed is used */\n\n    for (kk=0;kk<this.N-this.M;kk++) {\n      y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);\n      this.mt[kk] = this.mt[kk+this.M] ^ (y >>> 1) ^ mag01[y & 0x1];\n    }\n    for (;kk<this.N-1;kk++) {\n      y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);\n      this.mt[kk] = this.mt[kk+(this.M-this.N)] ^ (y >>> 1) ^ mag01[y & 0x1];\n    }\n    
y = (this.mt[this.N-1]&this.UPPER_MASK)|(this.mt[0]&this.LOWER_MASK);\n    this.mt[this.N-1] = this.mt[this.M-1] ^ (y >>> 1) ^ mag01[y & 0x1];\n\n    this.mti = 0;\n  }\n\n  y = this.mt[this.mti++];\n\n  /* Tempering */\n  y ^= (y >>> 11);\n  y ^= (y << 7) & 0x9d2c5680;\n  y ^= (y << 15) & 0xefc60000;\n  y ^= (y >>> 18);\n\n  return y >>> 0;\n}\n \n/* generates a random number on [0,0x7fffffff]-interval */\nMersenneTwister.prototype.genrand_int31 = function() {\n  return (this.genrand_int32()>>>1);\n}\n \n/* generates a random number on [0,1]-real-interval */\nMersenneTwister.prototype.genrand_real1 = function() {\n  return this.genrand_int32()*(1.0/4294967295.0); \n  /* divided by 2^32-1 */ \n}\n\n/* generates a random number on [0,1)-real-interval */\nMersenneTwister.prototype.random = function() {\n  return this.genrand_int32()*(1.0/4294967296.0); \n  /* divided by 2^32 */\n}\n \n/* generates a random number on (0,1)-real-interval */\nMersenneTwister.prototype.genrand_real3 = function() {\n  return (this.genrand_int32() + 0.5)*(1.0/4294967296.0); \n  /* divided by 2^32 */\n}\n \n/* generates a random number on [0,1) with 53-bit resolution*/\nMersenneTwister.prototype.genrand_res53 = function() { \n  var a=this.genrand_int32()>>>5, b=this.genrand_int32()>>>6; \n  return(a*67108864.0+b)*(1.0/9007199254740992.0); \n} \n\n/* These real versions are due to Isaku Wada, 2002/01/09 added */"
  },
  {
    "path": "antimatter_ts/readme.md",
    "content": "# antimatter: an algorithm that prunes CRDT/OT history\n\n[Antimatter](https://braid.org/antimatter) is the world's first peer-to-peer synchronization algorithm that can prune its history in a network where peers disconnect, reconnect, and merge offline edits.  Antimatter supports arbitrary simultaneous edits, from arbitrary peers, under arbitrary network delays and partitions, and guarantees full CRDT/OT consistency, while pruning unnecessary history within each partitioned subnet, and across subnets once they reconnect.  In steady state, it prunes down to zero overhead.  This lets you put synchronizing data structures in more parts of your software, without worrying about memory overhead.\n\nThis package implements an antimatter peer composed of three objects:\n\n```js\nvar {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter')\n```\n\n- *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.\n- *json_crdt*: created using `create_json_crdt`, this object is a pruneable JSON CRDT — \"JSON\" meaning it represents an arbitrary JSON datstructure, and \"CRDT\" and \"pruneable\" having the same meaning as for sequence_crdt below. 
The json_crdt makes recursive use of sequence_crdt structures to represent arbitrary JSON (for instance, a map is represented with a sequence_crdt structure for each value, where the first element in the sequence is the value).\n- *sequence_crdt*: methods to manipulate a pruneable sequence CRDT — \"sequence\" meaning it represents a javascript string or array, \"CRDT\" meaning this structure can be merged with other ones, and \"pruneable\" meaning that it supports an operation to remove meta-data when it is no longer needed (whereas CRDT's often keep track of this meta-data forever).\n\nThe Antimatter Algorithm was invented by Michael Toomim and Greg Little in the\n[Braid Project](https://braid.org) of [Invisible College](https://invisible.college/).\n\n[Click here to see more details, and the API side-by-side with the source code.](https://braid.org/antimatter)\n"
  },
  {
    "path": "antimatter_ts/src/antimatter_crdt.ts",
    "content": "/// # Software Architecture\n/// The software is architected into three objects:\n///\n/// ``` js\n/// let {create_antimatter_crdt, create_json_crdt, sequence_crdt} = require('@braidjs/antimatter') \n/// ```\n\nimport { create_json_crdt } from \"./json_crdt.ts\";\n\n// v522\n\n/// - *antimatter_crdt*: created using `create_antimatter_crdt`, this object is a json_crdt with antimatter algorithm methods added to it so that it can communicate with other peers to learn which history can be pruned, and tells the underlying json_crdt object to prune it.\nexport let create_antimatter_crdt;\n\n/// # create_antimatter_crdt(send[, init])\n///\n/// Creates and returns a new antimatter_crdt object (or adds antimatter_crdt methods and properties to `init`).\n///\n/// * `send`: A callback function to be called whenever this antimatter_crdt wants to send a\n///   message over a connection registered with `get` or `connect`. The sole\n///   parameter to this function is a JSONafiable object that hopes to be passed to\n///   the `receive` method on the antimatter_crdt object at the other end of the\n///   connection specified in the `conn` key.\n/// * `get_time`: function that returns a number representing time (e.g. `Date.now()`)\n/// * `set_timeout`: function that takes a callback and timeout length, and calls that callback after that amount of time; also returns an identifier that can be passed to `clear_timeout` to cancel the timeout (e.g. wrapping the javascript setTimeout)\n/// * `clear_timeout`: function that takes a timeout identifier an cancels it (e.g. wrapping the javascript clearTimeout)\n/// * `init`: (optional) An antimatter_crdt object to start with, which we'll add any properties to that it doesn't have, and we'll add all the antimatter_crdt methods to it. This option exists so you can serialize an antimatter_crdt instance as JSON, and then restore it later. 
\n/// ``` js\n/// let antimatter_crdt = create_antimatter_crdt(msg => {\n///     websockets[msg.conn].send(JSON.stringify(msg))\n///   },\n///   () => Date.now(),\n///   (func, t) => setTimeout(func, t),\n///   (t) => clearTimeout(t)),\n///.  JSON.parse(fs.readFileSync('./antimatter.backup'))\n/// )\n/// ```\ncreate_antimatter_crdt = (\n  send,\n  get_time,\n  set_timeout,\n  clear_timeout,\n  self\n) => {\n  self = create_json_crdt(self);\n  self.send = send;\n\n  self.id = self.id || Math.random().toString(36).slice(2);\n  self.next_seq = self.next_seq || 0;\n\n  self.conns = self.conns || {};\n  self.proto_conns = self.proto_conns || {};\n  self.conn_count = self.conn_count || 0;\n\n  self.fissures = self.fissures || {};\n  self.acked_boundary = self.acked_boundary || {};\n  self.marcos = self.marcos || {};\n  self.forget_cbs = self.forget_cbs || {};\n\n  self.version_groups = self.version_groups || {};\n\n  self.marco_map = self.marco_map || {};\n  self.marco_time_est_1 = self.marco_time_est_1 || 1000;\n  self.marco_time_est_2 = self.marco_time_est_2 || 1000;\n  self.marco_current_wait_time = self.marco_current_wait_time || 1000;\n  self.marco_increases_allowed = 1;\n  self.marco_timeout = self.marco_timeout || null;\n\n  function raw_add_version_group(version_array) {\n    let version_map = {};\n    for (let v of version_array) {\n      if (version_map[v]) continue;\n      version_map[v] = true;\n      if (self.version_groups[v]) self.version_groups[v].forEach((v) => (version_map[v] = true));\n    }\n    let version_group = Object.keys(version_map).sort();\n    version_group.forEach((v) => (self.version_groups[v] = version_group));\n    return version_group;\n  }\n\n  function get_parent_and_child_sets(children) {\n    let parent_sets = {};\n    let child_sets = {};\n    let done = {};\n    function add_set_to_sets(s, sets, mark_done) {\n      let container = { members: s };\n      let array = Object.keys(s);\n      if (array.length < 2) return;\n      for 
(let v of array) {\n        sets[v] = container;\n        if (mark_done) done[v] = true;\n      }\n    }\n    add_set_to_sets(self.current_version, parent_sets, true);\n    for (let v of Object.keys(self.T)) {\n      if (done[v]) continue;\n      done[v] = true;\n      if (!children[v]) continue;\n      let first_child_set = children[v];\n      let first_child_array = Object.keys(first_child_set);\n      let first_parent_set = self.T[first_child_array[0]];\n      let first_parent_array = Object.keys(first_parent_set);\n      if (\n        first_child_array.every((child) => {\n          let parent_set = self.T[child];\n          let parent_array = Object.keys(parent_set);\n          return (\n            parent_array.length == first_parent_array.length &&\n            parent_array.every((parent) => first_parent_set[parent])\n          );\n        }) &&\n        first_parent_array.every((parent) => {\n          let child_set = children[parent];\n          let child_array = Object.keys(child_set);\n          return (\n            child_array.length == first_child_array.length &&\n            child_array.every((child) => first_child_set[child])\n          );\n        })\n      ) {\n        add_set_to_sets(first_parent_set, parent_sets, true);\n        add_set_to_sets(first_child_set, child_sets);\n      }\n    }\n    return { parent_sets, child_sets };\n  }\n\n  function find_one_bubble(bottom, children, child_sets, restricted) {\n    let expecting = { ...bottom };\n    let seen = {};\n    Object.keys(bottom).forEach(\n      (v) =>\n        children[v] &&\n        Object.keys(children[v]).forEach((v) => (seen[v] = true))\n    );\n    let q = Object.keys(expecting);\n    let last_top = null;\n    while (q.length) {\n      cur = q.shift();\n      if (!self.T[cur]) {\n        if (!restricted) throw \"bad\";\n        else return last_top;\n      }\n      if (restricted && restricted[cur]) return last_top;\n\n      if (seen[cur]) continue;\n\n      if (children[cur] && 
!Object.keys(children[cur]).every((c) => seen[c]))\n        continue;\n      seen[cur] = true;\n      delete expecting[cur];\n\n      if (!Object.keys(expecting).length) {\n        last_top = { [cur]: true };\n        if (!restricted) return last_top;\n      }\n\n      Object.keys(self.T[cur]).forEach((p) => {\n        expecting[p] = true;\n        q.push(p);\n      });\n\n      if (\n        child_sets[cur] &&\n        Object.keys(child_sets[cur].members).every((v) => seen[v])\n      ) {\n        let expecting_array = Object.keys(expecting);\n        let parent_set = self.T[cur];\n        let parent_array = Object.keys(parent_set);\n        if (\n          expecting_array.length == parent_array.length &&\n          expecting_array.every((v) => parent_set[v])\n        ) {\n          last_top = child_sets[cur].members;\n          if (!restricted) return last_top;\n        }\n      }\n    }\n    return last_top;\n  }\n\n  function add_version_group(version_array) {\n    let version_group = raw_add_version_group(version_array);\n    if (!version_array.some((x) => self.T[x])) return version_group[0];\n\n    let children = self.get_child_map();\n    let { parent_sets, child_sets } = get_parent_and_child_sets(children);\n\n    let to_bubble = {};\n    function mark_bubble(v, bubble) {\n      if (to_bubble[v]) return;\n      to_bubble[v] = bubble;\n      for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);\n    }\n\n    let bottom = Object.fromEntries(\n      version_group.filter((x) => self.T[x]).map((x) => [x, true])\n    );\n    let top = find_one_bubble(bottom, children, child_sets);\n    let bubble = [Object.keys(bottom).sort()[0], Object.keys(top)[0]];\n    for (let v of Object.keys(top)) to_bubble[v] = bubble;\n    for (let v of Object.keys(bottom)) mark_bubble(v, bubble);\n\n    self.apply_bubbles(to_bubble);\n    return version_group[0];\n  }\n\n  let orig_send = send;\n  send = (x) => {\n    if (self.version_groups[x.version])\n      x.version = 
self.version_groups[x.version];\n    if (x.parents) {\n      x.parents = { ...x.parents };\n      Object.keys(x.parents).forEach((v) =>\n        self.version_groups[v] && self.version_groups[v].forEach((v) => (x.parents[v] = true))\n      );\n    }\n    if (Array.isArray(x.versions)) {\n      x.versions = JSON.parse(JSON.stringify(x.versions));\n      x.versions.forEach(\n        (v) =>\n          self.version_groups[v.version] &&\n          (v.version = self.version_groups[v.version])\n      );\n      x.versions.forEach((v) => {\n        Object.keys(v.parents).forEach((vv) =>\n          self.version_groups[vv] && self.version_groups[vv].forEach((vv) => (v.parents[vv] = true))\n        );\n      });\n    }\n\n    orig_send(x);\n  };\n\n  /// # antimatter_crdt.receive(message)\n  ///\n  /// Let this antimatter object \"receive\" a message from another antimatter object, presumably from its `send` callback.\n  /// ``` js\n  /// websocket.on('message', data => {\n  ///     antimatter_crdt.receive(JSON.parse(data)) });\n  /// ```\n  /// You generally do not need to mess with a message object directly, but below are the various message objects you might see, categorized by their `cmd` entry. 
Note that each object also\n  ///   contains a `conn` entry with the id of the connection the message is sent\n  ///   over.\n  self.receive = (x) => {\n    let {\n      cmd,\n      version,\n      parents,\n      patches,\n      versions,\n      fissure,\n      fissures,\n      seen,\n      forget,\n      marco,\n      peer,\n      conn,\n    } = x;\n\n    if (version && typeof version != \"string\") {\n      if (!self.T[version[0]]) version = add_version_group(version);\n      else version = version[0];\n    }\n    if (parents) {\n      parents = { ...parents };\n      Object.keys(parents).forEach((v) => {\n        if (self.version_groups[v] && self.version_groups[v][0] != v)\n          delete parents[v];\n      });\n    }\n\n    if (versions && versions.forEach) versions.forEach((v) => {\n      if (typeof v.version != \"string\") {\n        if (!self.T[v.version[0]]) v.version = add_version_group(v.version);\n        else v.version = v.version[0];\n      }\n      v.parents = { ...v.parents };\n      Object.keys(v.parents).forEach((vv) => {\n        if (self.version_groups[vv] && self.version_groups[vv][0] != vv)\n          delete v.parents[vv];\n      });\n    });\n\n    let marco_versions_array = version\n      ? [version]\n      : versions && !Array.isArray(versions)\n        ? 
Object.keys(versions).sort()\n        : null;\n    let marco_versions =\n      marco_versions_array &&\n      Object.fromEntries(marco_versions_array.map((v) => [v, true]));\n\n    if (versions && !Array.isArray(versions)) {\n      versions = { ...versions };\n      Object.keys(versions).forEach((v) => {\n        if (self.version_groups[v] && self.version_groups[v][0] != v)\n          delete versions[v];\n      });\n      if (!Object.keys(versions).length) return;\n    }\n\n    /// ## message `get`\n    /// `get` is the first message sent over a connection, and the peer at the other end will respond with `welcome`.\n    /// ``` js\n    /// { cmd: 'get',\n    ///   peer: 'SENDER_ID',\n    ///   conn: 'CONN_ID',\n    ///   parents: {'PARENT_VERSION_ID': true, ...} }\n    /// ```\n    /// The `parents` are optional, and describes which versions this peer already has. The other end will respond with versions since that set of parents.\n    if (cmd == \"get\" || (cmd == \"welcome\" && peer != null)) {\n      if (self.conns[conn] != null) throw Error(\"bad\");\n      self.conns[conn] = { peer, seq: ++self.conn_count };\n    }\n\n    /// ## message `fissure`\n    ///\n    /// Sent to alert peers about a fissure. The `fissure` entry contains information about the two peers involved in the fissure, the specific connection id that broke, the `versions` that need to be protected, and the `time` of the fissure (in case we want to ignore it after some time). 
It is also possible to send multiple `fissures` in an array.\n    /// ``` js\n    /// { cmd: 'fissure',\n    ///   fissure: { // or fissures: [{...}, {...}, ...],\n    ///     a: 'PEER_A_ID',\n    ///     b:  'PEER_B_ID',\n    ///     conn: 'CONN_ID',\n    ///     versions: {'VERSION_ID': true, ...},\n    ///     time: Date.now()\n    ///   },\n    ///   conn: 'CONN_ID' }\n    /// ```\n    /// Note that `time` isn't used for anything critical, as it's just wallclock time.\n    if (fissure) fissures = [fissure];\n\n    if (fissures) fissures.forEach((f) => (f.t = self.conn_count));\n\n    if (versions && (cmd == \"set\" || cmd == \"welcome\"))\n      versions = Object.fromEntries(versions.map((v) => [v.version, v]));\n    if (version) versions = { [version]: true };\n\n    let rebased_patches = [];\n\n    let fissures_back = [];\n    let fissures_forward = [];\n    let fissures_done = {};\n\n    function copy_fissures(fs) {\n      return fs.map((f) => {\n        f = JSON.parse(JSON.stringify(f));\n        delete f.t;\n        return f;\n      });\n    }\n\n    if (fissures) {\n      let fiss_map = Object.fromEntries(\n        fissures.map((f) => [f.a + \":\" + f.b + \":\" + f.conn, f])\n      );\n      for (let [key, f] of Object.entries(fiss_map)) {\n        if (fissures_done[f.conn]) continue;\n        fissures_done[f.conn] = true;\n\n        let our_f = self.fissures[key];\n        let other_key = f.b + \":\" + f.a + \":\" + f.conn;\n        let their_other = fiss_map[other_key];\n        let our_other = self.fissures[other_key];\n\n        if (!our_f) self.fissures[key] = f;\n        if (their_other && !our_other) self.fissures[other_key] = their_other;\n\n        if (!their_other && !our_other && f.b == self.id) {\n          if (self.conns[f.conn]) delete self.conns[f.conn];\n          our_other = self.fissures[other_key] = {\n            ...f,\n            a: f.b,\n            b: f.a,\n            t: self.conn_count,\n          };\n        }\n\n        if 
(!their_other && our_other) {\n          fissures_back.push(f);\n          fissures_back.push(our_other);\n        }\n\n        if (!our_f || (their_other && !our_other)) {\n          fissures_forward.push(f);\n          if (their_other || our_other)\n            fissures_forward.push(their_other || our_other);\n        }\n      }\n    }\n\n    /// ## message `welcome`\n    /// Sent in response to a `get`, basically contains the initial state of the document; incoming `welcome` messages are also propagated over all our other connections but only with information that was new to us, so the propagation will eventually stop. When sent in response to a `get` (rather than being propagated), we include a `peer` entry with the id of the sending peer, so they know who we are, and to trigger them to send us their own  `welcome` message.\n    ///\n    /// ``` js\n    /// {\n    ///   cmd: 'welcome',\n    ///   versions: [\n    ///     //each version looks like a set message...\n    ///   ],\n    ///   fissures: [\n    ///     //each fissure looks as it would in a fissure message...\n    ///   ],\n    ///   parents: \n    ///     {\n    ///       //versions you must have before consuming these new versions\n    ///       'PARENT_VERSION_ID': true,\n    ///       ...\n    ///     },\n    ///   [peer: 'SENDER_ID'], // if responding to a get\n    ///   conn: 'CONN_ID'\n    /// } \n    /// ```\n    let _T = {};\n    let added_versions = [];\n    if (cmd == \"welcome\") {\n      let versions_to_add = {};\n      let vs = Object.values(versions);\n      vs.forEach((v) => (versions_to_add[v.version] = v.parents));\n      vs.forEach((v) => {\n        if (\n          self.T[v.version] ||\n          (self.version_groups[v.version] &&\n            self.version_groups[v.version][0] != v.version)\n        ) {\n          remove_ancestors(v.version);\n          function remove_ancestors(v) {\n            if (versions_to_add[v]) {\n              
Object.keys(versions_to_add[v]).forEach(remove_ancestors);\n              delete versions_to_add[v];\n            }\n          }\n        }\n      });\n\n      for (let v of vs) _T[v.version] = v.parents;\n\n      l1: for (let v of vs) {\n        if (versions_to_add[v.version]) {\n          let ps = Object.keys(v.parents);\n\n          if (!ps.length && Object.keys(self.T).length) continue;\n          for (p of ps) if (!self.T[p]) continue l1;\n\n          rebased_patches = rebased_patches.concat(\n            self.add_version(v.version, v.parents, v.patches, v.sort_keys)\n          );\n\n          added_versions.push(v);\n          delete _T[v.version];\n        }\n      }\n    }\n\n    if (cmd == \"get\" || (cmd == \"welcome\" && peer != null)) {\n      let fissures_back = Object.values(self.fissures);\n\n      if (cmd == \"welcome\") {\n        let leaves = { ..._T };\n        Object.keys(_T).forEach((v) => {\n          Object.keys(_T[v]).forEach((p) => delete leaves[p]);\n        });\n\n        let f = {\n          a: self.id,\n          b: peer,\n          conn: \"-\" + conn,\n          versions: Object.fromEntries(\n            added_versions\n              .concat(Object.keys(leaves).map((v) => versions[v]))\n              .map((v) => [v.version, true])\n          ),\n          time: get_time(),\n          t: self.conn_count,\n        };\n        if (Object.keys(f.versions).length) {\n          let key = f.a + \":\" + f.b + \":\" + f.conn;\n          self.fissures[key] = f;\n          fissures_back.push(f);\n          fissures_forward.push(f);\n        }\n      }\n\n      send({\n        cmd: \"welcome\",\n        versions: self.generate_braid(parents || versions),\n        fissures: copy_fissures(fissures_back),\n        parents:\n          parents &&\n          Object.keys(parents).length &&\n          self.get_leaves(self.ancestors(parents, true)),\n        ...(cmd == \"get\" ? 
{ peer: self.id } : {}),\n        conn,\n      });\n    } else if (fissures_back.length) {\n      send({\n        cmd: \"fissure\",\n        fissures: copy_fissures(fissures_back),\n        conn,\n      });\n    }\n\n    /// ## message `forget`\n    /// Used to disconnect without creating a fissure, presumably meaning the sending peer doesn't plan to make any edits while they're disconnected.\n    /// ``` js\n    /// {cmd: 'forget', conn: 'CONN_ID'}\n    /// ```\n    if (cmd == \"forget\") {\n      if (self.conns[conn] == null) throw Error(\"bad\");\n      send({ cmd: \"ack\", forget: true, conn });\n\n      delete self.conns[conn];\n      delete self.proto_conns[conn];\n    }\n\n    /// ## message forget `ack` \n    /// Sent in response to `forget`.. so they know we forgot them.\n    /// ``` js\n    /// {cmd: 'ack', forget: true, conn: 'CONN_ID'}\n    /// ```\n    if (cmd == \"ack\" && forget) {\n      self.forget_cbs[conn]();\n    }\n\n    /// ## message `set`\n    /// Sent to alert peers about a change in the document. The change is represented as a version, with a unique id, a set of parent versions (the most recent versions known before adding this version), and an array of patches, where the offsets in the patches do not take into account the application of other patches in the same array.\n    /// ``` js\n    /// { cmd: 'set',\n    ///   version: 'VERSION_ID',\n    ///   parents: {'PARENT_VERSION_ID': true, ...},\n    ///   patches: [ {range: '.json.path.a.b', content: 42}, ... 
],\n    ///   conn: 'CONN_ID' }\n    /// ```\n    if (cmd == \"set\") {\n      if (conn == null || !self.T[version]) {\n        let ps = Object.keys(parents);\n\n        if (!ps.length && Object.keys(self.T).length) return;\n        for (p of ps) if (!self.T[p]) return;\n\n        rebased_patches = self.add_version(version, parents, patches);\n\n        for (let c of Object.keys(self.conns))\n          if (c != conn)\n            send({ cmd: \"set\", version, parents, patches, marco, conn: c });\n      }\n    }\n\n    /// ## message `marco`\n    /// Sent for pruning purposes, to try and establish whether everyone has seen the most recent versions. Note that a `set` message is treated as a `marco` message for the version being set.\n    /// ``` js\n    /// { cmd: 'marco',\n    ///   version: 'MARCO_ID',\n    ///   versions: {'VERSION_ID_A': true, ...},\n    ///   conn: 'CONN_ID' }\n    /// ```\n    if (cmd == \"marco\" || cmd == \"set\") {\n      if (!Object.keys(versions).every((v) => self.T[v])) return;\n\n      if (\n        self.marco_timeout &&\n        marco_versions_array.length ==\n        Object.keys(self.current_version).length &&\n        marco_versions_array.every((x) => self.current_version[x])\n      ) {\n        clear_timeout(self.marco_timeout);\n        self.marco_timeout = null;\n      }\n\n      let m = self.marcos[marco];\n      if (!m) {\n        m = self.marcos[marco] = {\n          id: marco,\n          origin: conn,\n          count: Object.keys(self.conns).length - (conn != null ? 
1 : 0),\n          versions: marco_versions,\n          seq: self.conn_count,\n          time: get_time(),\n        };\n        m.orig_count = m.count;\n        m.real_marco = cmd == \"marco\";\n        m.key = JSON.stringify(Object.keys(m.versions).sort());\n        self.marco_map[m.key] = self.marco_map[m.key] || {};\n        let before = Object.keys(self.marco_map[m.key]).length;\n        self.marco_map[m.key][m.id] = true;\n        let after = Object.keys(self.marco_map[m.key]).length;\n        if (before == 1 && after == 2 && self.marco_increases_allowed > 0) {\n          self.marco_current_wait_time *= 2;\n          self.marco_increases_allowed--;\n        }\n\n        if (cmd == \"marco\")\n          for (let c of Object.keys(self.conns))\n            if (c != conn)\n              send({\n                cmd: \"marco\",\n                marco,\n                versions: marco_versions,\n                conn: c,\n              });\n      } else if (m.seq < self.conns[conn].seq) {\n        send({\n          cmd: \"ack\",\n          seen: \"local\",\n          marco,\n          versions: marco_versions,\n          conn,\n        });\n        return;\n      } else m.count--;\n      check_marco_count(marco);\n    }\n\n    /// ## message local `ack`\n    /// Sent in response to `set`, but not right away; a peer will first send the `set` over all its other connections, and only after they have all responded with a local `ack` – and we didn't see a `fissure` message while waiting – will the peer send a local `ack` over the originating connection.\n    /// ``` js\n    /// {cmd: 'ack', seen: 'local', version: 'VERSION_ID', conn: 'CONN_ID'}\n    /// ```\n    if (cmd == \"ack\" && seen == \"local\") {\n      let m = self.marcos[marco];\n      if (!m || m.cancelled) return;\n      m.count--;\n      check_marco_count(marco);\n    }\n    function check_marco_count(marco) {\n      let m = self.marcos[marco];\n      if (m && m.count === 0 && !m.cancelled) {\n        m.time2 
= get_time();\n        if (m.orig_count > 0) {\n          let t = m.time2 - m.time;\n          let weight = 0.1;\n          self.marco_time_est_1 =\n            weight * t + (1 - weight) * self.marco_time_est_1;\n        }\n        if (m.origin != null) {\n          if (self.conns[m.origin])\n            send({\n              cmd: \"ack\",\n              seen: \"local\",\n              marco,\n              versions: marco_versions,\n              conn: m.origin,\n            });\n        } else add_full_ack_leaves(marco);\n      }\n    }\n\n    /// ## message global `ack`\n    /// Sent after an originating peer has received a local `ack` over all its connections, or after any peer receives a global `ack`, so that everyone may come to know that this version has been seen by everyone in this peer group.\n    /// ``` js\n    /// {cmd: 'ack', seen: 'global', version: 'VERSION_ID', conn: 'CONN_ID'}\n    /// ```\n    if (cmd == \"ack\" && seen == \"global\") {\n      let m = self.marcos[marco];\n\n      if (!m || m.cancelled) return;\n\n      let t = get_time() - m.time2;\n      let weight = 0.1;\n      self.marco_time_est_2 =\n        weight * t + (1 - weight) * self.marco_time_est_2;\n\n      if (m.real_marco && Object.keys(self.marco_map[m.key]).length == 1) {\n        self.marco_current_wait_time *= 0.8;\n      }\n\n      add_full_ack_leaves(marco, conn);\n    }\n    function add_full_ack_leaves(marco, conn) {\n      let m = self.marcos[marco];\n      if (!m || m.cancelled) return;\n      m.cancelled = true;\n\n      for (let [c, cc] of Object.entries(self.conns))\n        if (c != conn && cc.seq <= m.seq)\n          send({\n            cmd: \"ack\",\n            seen: \"global\",\n            marco,\n            versions: marco_versions,\n            conn: c,\n          });\n\n      for (let v of Object.keys(m.versions)) {\n        if (!self.T[v]) continue;\n        let marks = {};\n        let f = (v) => {\n          if (!marks[v]) {\n            marks[v] = 
true;\n            delete self.acked_boundary[v];\n            Object.keys(self.T[v]).forEach(f);\n          }\n        };\n        f(v);\n        self.acked_boundary[v] = true;\n      }\n      prune(false, m.seq);\n    }\n\n    if (added_versions.length || fissures_forward.length) {\n      for (let c of Object.keys(self.conns))\n        if (c != conn)\n          send({\n            cmd: added_versions.length ? \"welcome\" : \"fissure\",\n            ...(added_versions.length ? { versions: added_versions } : {}),\n            fissures: copy_fissures(fissures_forward),\n            conn: c,\n          });\n    }\n\n    if (fissures_forward.length) resolve_fissures();\n\n    if (\n      !self.marco_timeout &&\n      cmd != \"set\" &&\n      cmd != \"marco\" &&\n      prune(true)\n    ) {\n      if (!self.marco_current_wait_time) {\n        self.marco_current_wait_time =\n          4 * (self.marco_time_est_1 + self.marco_time_est_2);\n      }\n\n      let t = Math.random() * self.marco_current_wait_time;\n\n      self.marco_timeout = set_timeout(() => {\n        self.marco_increases_allowed = 1;\n        self.marco_timeout = null;\n        if (prune(true)) self.marco();\n      }, t);\n    }\n\n    if (cmd == \"welcome\" && peer == null && prune(true, null, true))\n      self.marco();\n\n    return rebased_patches;\n  };\n\n  /// # antimatter_crdt.get(conn) or connect(conn)\n  ///\n  /// Register a new connection with id `conn` – triggers this antimatter_crdt object to send a `get` message over the given connection. \n  ///\n  /// ``` js\n  /// alice_antimatter_crdt.get('connection_to_bob')\n  /// ```\n  self.get = (conn) => {\n    self.proto_conns[conn] = true;\n    send({ cmd: \"get\", peer: self.id, conn });\n  };\n  self.connect = self.get;\n\n  /// # antimatter_crdt.forget(conn)\n  ///\n  /// Disconnect the given connection without creating a fissure – we don't need to reconnect with them.. it seems.. 
if we do, then we need to call `disconnect` instead, which will create a fissure allowing us to reconnect.\n  ///\n  /// ``` js\n  /// alice_antimatter_crdt.forget('connection_to_bob')\n  /// ```\n  self.forget = async (conn) => {\n    await new Promise((done) => {\n      if (self.conns[conn] != null) {\n        self.forget_cbs[conn] = done;\n        send({ cmd: \"forget\", conn });\n      }\n      self.disconnect(conn, false);\n    });\n  };\n\n  /// # antimatter_crdt.disconnect(conn)\n  ///\n  /// If we detect that a connection has closed, let the antimatter_crdt object know by calling this method with the given connection id – this will create a fissure so we can reconnect with whoever was on the other end of the connection later on. \n  ///\n  /// ``` js\n  /// alice_antimatter_crdt.disconnect('connection_to_bob')\n  /// ```\n  self.disconnect = (conn, fissure = true) => {\n    if (self.conns[conn] == null && !self.proto_conns[conn]) return;\n    delete self.proto_conns[conn];\n\n    if (self.conns[conn]) {\n      let peer = self.conns[conn].peer;\n      delete self.conns[conn];\n\n      if (fissure) {\n        fissure = create_fissure(peer, conn);\n        if (fissure) self.receive({ cmd: \"fissure\", fissure });\n      }\n    }\n  };\n\n  /// # antimatter_crdt.set(...patches)\n  ///\n  /// Modify this antimatter_crdt object by applying the given patches. Each patch looks like `{range: '.life.meaning', content: 42}`. Calling this method will trigger calling the `send` callback to let our peers know about this change. 
\n  ///\n  /// ``` js\n  /// antimatter_crdt.set({\n  ///   range: '.life.meaning',\n  ///   content: 42\n  /// })\n  /// ```\n  self.set = (...patches) => {\n    let version = `${self.next_seq++}@${self.id}`;\n    self.receive({\n      cmd: \"set\",\n      version,\n      parents: { ...self.current_version },\n      patches,\n      marco: Math.random().toString(36).slice(2),\n    });\n    return version;\n  };\n\n  /// # antimatter_crdt.marco()\n  ///\n  /// Initiate sending a `marco` message to try and establish whether certain versions can be pruned. \n  ///\n  /// ``` js\n  /// antimatter_crdt.marco()\n  /// ```\n  self.marco = () => {\n    let versions = { ...self.current_version };\n    Object.keys(versions).forEach((v) =>\n      self.version_groups[v] && self.version_groups[v].forEach((v) => (versions[v] = true))\n    );\n\n    let marco = Math.random().toString(36).slice(2);\n    self.receive({ cmd: \"marco\", marco, versions });\n    return marco;\n  };\n\n  function cancel_marcos() {\n    for (let m of Object.values(self.marcos)) m.cancelled = true;\n  }\n\n  function create_fissure(peer, conn) {\n    let ack_versions = self.ancestors(self.acked_boundary);\n\n    let entries = Object.keys(self.T)\n      .filter((v) => !ack_versions[v] || self.acked_boundary[v])\n      .map((v) => [v, true]);\n    if (!entries.length) return;\n    let versions = Object.fromEntries(entries);\n    return { a: self.id, b: peer, conn, versions, time: get_time() };\n  }\n\n  function resolve_fissures() {\n    let unfissured = {};\n\n    Object.entries(self.fissures).forEach(([fk, f]) => {\n      let other_key = f.b + \":\" + f.a + \":\" + f.conn;\n      let other = self.fissures[other_key];\n      if (other) {\n        if (Object.keys(f.versions).length) {\n          for (let v of Object.keys(f.versions)) unfissured[v] = true;\n          self.fissures[fk] = { ...f, versions: {} };\n        }\n        if (Object.keys(other.versions).length) {\n          for (let v of 
Object.keys(other.versions)) unfissured[v] = true;\n          self.fissures[other_key] = { ...other, versions: {} };\n        }\n      }\n    });\n\n    if (Object.keys(unfissured).length) {\n      cancel_marcos();\n\n      let ack_versions = self.ancestors(self.acked_boundary);\n      let unfissured_descendants = self.descendants(unfissured, true);\n      for (let un of Object.keys(unfissured_descendants))\n        if (ack_versions[un]) delete ack_versions[un];\n      self.acked_boundary = self.get_leaves(ack_versions);\n    }\n  }\n\n  function prune(just_checking, t, just_versions) {\n    if (just_checking) t = Infinity;\n\n    let fissures = just_checking ? { ...self.fissures } : self.fissures;\n\n    Object.entries(fissures).forEach((x) => {\n      let other_key = x[1].b + \":\" + x[1].a + \":\" + x[1].conn;\n      let other = fissures[other_key];\n      if (other && x[1].t <= t && other.t <= t) {\n        delete fissures[x[0]];\n        delete fissures[other_key];\n      }\n    });\n\n    if (self.fissure_lifetime != null) {\n      let now = get_time();\n      Object.entries(fissures).forEach(([k, f]) => {\n        if (f.time == null) f.time = now;\n        if (f.time <= now - self.fissure_lifetime) {\n          delete fissures[k];\n        }\n      });\n    }\n\n    if (\n      just_checking &&\n      !just_versions &&\n      Object.keys(fissures).length < Object.keys(self.fissures).length\n    )\n      return true;\n\n    let restricted = {};\n\n    Object.values(fissures).forEach((f) => {\n      Object.keys(f.versions).forEach((v) => (restricted[v] = true));\n    });\n\n    if (!just_checking) {\n      let acked = self.ancestors(self.acked_boundary);\n      Object.keys(self.T).forEach((x) => {\n        if (!acked[x]) restricted[x] = true;\n      });\n    }\n\n    let children = self.get_child_map();\n    let { parent_sets, child_sets } = get_parent_and_child_sets(children);\n\n    let to_bubble = {};\n    function mark_bubble(v, bubble) {\n      if 
(to_bubble[v]) return;\n      to_bubble[v] = bubble;\n      for (let vv of Object.keys(self.T[v])) mark_bubble(vv, bubble);\n    }\n    let visited = {};\n    function f(cur) {\n      if (!self.T[cur] || visited[cur]) return;\n      visited[cur] = true;\n\n      if (\n        to_bubble[cur] == null &&\n        parent_sets[cur] &&\n        !parent_sets[cur].done\n      ) {\n        parent_sets[cur].done = true;\n        let bottom = parent_sets[cur].members;\n        let top = find_one_bubble(bottom, children, child_sets, restricted);\n        if (top) {\n          if (just_checking) return true;\n          let bottom_array = Object.keys(bottom).sort();\n          let top_array = Object.keys(top);\n          raw_add_version_group(bottom_array);\n          let bubble = [bottom_array[0], top_array[0]];\n          for (let v of top_array) to_bubble[v] = bubble;\n          for (let v of bottom_array) mark_bubble(v, bubble);\n        }\n      }\n      if (to_bubble[cur] == null) {\n        let top = find_one_bubble(\n          { [cur]: true },\n          children,\n          child_sets,\n          restricted\n        );\n        if (top && !top[cur]) {\n          if (just_checking) return true;\n          let bubble = [cur, Object.keys(top)[0]];\n          for (let v of Object.keys(top)) to_bubble[v] = bubble;\n          mark_bubble(bubble[0], bubble);\n        } else {\n          to_bubble[cur] = [cur, cur];\n        }\n      }\n      return Object.keys(\n        self.T[cur] || self.T[self.version_groups[cur][0]]\n      ).some(f);\n    }\n    if (Object.keys(self.current_version).some(f) && just_checking)\n      return true;\n\n    self.apply_bubbles(to_bubble);\n\n    for (let [k, m] of Object.entries(self.marcos)) {\n      let vs = Object.keys(m.versions);\n      if (\n        !vs.length ||\n        !vs.every((v) => self.T[v] || self.version_groups[v])\n      ) {\n        delete self.marcos[k];\n        delete self.marco_map[m.key][m.id];\n        if 
(!Object.keys(self.marco_map[m.key]).length)\n          delete self.marco_map[m.key];\n      }\n    }\n\n    for (let [v, vs] of Object.entries(self.version_groups)) {\n      if (!self.T[vs[0]]) delete self.version_groups[v];\n    }\n  }\n\n  return self;\n};\n"
  },
  {
    "path": "antimatter_ts/src/json_crdt.ts",
    "content": "/// - *json_crdt*: created using `create_json_crdt`, this object is a pruneable\n///   JSON CRDT — \"JSON\" meaning it represents an arbitrary JSON datstructure, and\n///   \"CRDT\" and \"pruneable\" having the same meaning as for sequence_crdt below. The\n///   json_crdt makes recursive use of sequence_crdt structures to represent\n///   arbitrary JSON (for instance, a map is represented with a sequence_crdt\n///   structure for each value, where the first element in the sequence is the\n///   value).\n\nimport {\n  create_node as sequence_crdt_create_node,\n  generate_braid as sequence_crdt_generate_braid,\n  apply_bubbles as sequence_crdt_apply_bubbles,\n  get as sequence_crdt_get,\n  set as sequence_crdt_set,\n  length as sequence_crdt_length,\n  break_node as sequence_crdt_break_node,\n  add_version as sequence_crdt_add_version,\n  traverse as sequence_crdt_traverse,\n} from \"./sequence_crdt.ts\";\n\n\n/// ## create_json_crdt([init])\n///\n/// Create a new `json_crdt` object (or start with `init`, and add stuff to that). \n///\n/// ``` js\n/// let json_crdt = create_json_crdt()\n/// ``` \nexport const create_json_crdt = (self) => {\n    self = self || {};\n    self.S = self.S || null;\n    self.T = self.T || {};\n    self.root_version = null;\n    self.current_version = self.current_version || {};\n    self.version_cache = self.version_cache || {};\n  \n    let is_lit = (x) => !x || typeof x != \"object\" || x.t == \"lit\";\n    let get_lit = (x) => (x && typeof x == \"object\" && x.t == \"lit\" ? x.S : x);\n    let make_lit = (x) => (x && typeof x == \"object\" ? { t: \"lit\", S: x } : x);\n    self = self || {};\n  \n    /// # json_crdt.read()\n    ///\n    /// Returns an instance of the `json` object represented by this json_crdt data-structure. 
\n    ///\n    /// ``` js\n    /// console.log(json_crdt.read())\n    /// ```\n    self.read = (is_anc) => {\n      if (!is_anc) is_anc = () => true;\n  \n      return raw_read(self.S, is_anc);\n    };\n  \n    function raw_read(x, is_anc) {\n      if (x && typeof x == \"object\") {\n        if (x.t == \"lit\") return JSON.parse(JSON.stringify(x.S));\n        if (x.t == \"val\")\n          return raw_read(sequence_crdt_get(x.S, 0, is_anc), is_anc);\n        if (x.t == \"obj\") {\n          let o = {};\n          Object.entries(x.S).forEach(([k, v]) => {\n            let x = raw_read(v, is_anc);\n            if (x != null) o[k] = x;\n          });\n          return o;\n        }\n        if (x.t == \"arr\") {\n          let a = [];\n          sequence_crdt_traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted)\n                node.elems.forEach((e) => a.push(raw_read(e, is_anc)));\n            },\n            true\n          );\n          return a;\n        }\n        if (x.t == \"str\") {\n          let s = [];\n          sequence_crdt_traverse(\n            x.S,\n            is_anc,\n            (node, _, __, ___, ____, deleted) => {\n              if (!deleted) s.push(node.elems);\n            },\n            true\n          );\n          return s.join(\"\");\n        }\n        throw Error(\"bad\");\n      }\n      return x;\n    }\n  \n    /// # json_crdt.generate_braid(versions)\n    ///\n    /// Returns an array of `set` messages that each look like this: `{version, parents, patches, sort_keys}`, such that if we pass all these messages to `antimatter_crdt.receive()`, we'll reconstruct the data in this `json_crdt` data-structure, assuming the recipient already has the given `versions` (each version is represented as an object with a version, and each value is `true`).\n    ///\n    /// ``` js\n    /// json_crdt.generate_braid({\n    ///   alice2: true, \n    ///   bob3: true\n    /// 
})\n    /// ```\n    self.generate_braid = (versions) => {\n      let anc =\n        versions && Object.keys(versions).length\n          ? self.ancestors(versions, true)\n          : {};\n      let is_anc = (x) => anc[x];\n  \n      if (Object.keys(self.T).length === 0) return [];\n  \n      return Object.entries(self.version_cache)\n        .filter((x) => !is_anc(x[0]))\n        .map(([version, set_message]) => {\n          return (self.version_cache[version] =\n            set_message || generate_set_message(version));\n        });\n  \n      function generate_set_message(version) {\n        if (!Object.keys(self.T[version]).length) {\n          return {\n            version,\n            parents: {},\n            patches: [{ range: \"\", content: self.read((v) => v == version) }],\n          };\n        }\n  \n        let is_lit = (x) => !x || typeof x !== \"object\" || x.t === \"lit\";\n        let get_lit = (x) =>\n          x && typeof x === \"object\" && x.t === \"lit\" ? x.S : x;\n  \n        let ancs = self.ancestors({ [version]: true });\n        delete ancs[version];\n        let is_anc = (x) => ancs[x];\n        let path = [];\n        let patches = [];\n        let sort_keys = {};\n        recurse(self.S);\n        function recurse(x) {\n          if (is_lit(x)) {\n          } else if (x.t === \"val\") {\n            sequence_crdt_generate_braid(x.S, version, is_anc, raw_read)\n              .forEach((s) => {\n                if (s[2].length) {\n                  patches.push({ range: path.join(\"\"), content: s[2][0] });\n                  if (s[3]) sort_keys[patches.length - 1] = s[3];\n                }\n              });\n            sequence_crdt_traverse(x.S, is_anc, (node) => {\n              node.elems.forEach(recurse);\n            });\n          } else if (x.t === \"arr\") {\n            sequence_crdt_generate_braid(x.S, version, is_anc).forEach((s) => {\n              patches.push({\n                range: `${path.join(\"\")}[${s[0]}:${s[0] 
+ s[1]}]`,\n                content: s[2],\n              });\n              if (s[3]) sort_keys[patches.length - 1] = s[3];\n            });\n            let i = 0;\n            sequence_crdt_traverse(x.S, is_anc, (node) => {\n              node.elems.forEach((e) => {\n                path.push(`[${i++}]`);\n                recurse(e);\n                path.pop();\n              });\n            });\n          } else if (x.t === \"obj\") {\n            Object.entries(x.S).forEach((e) => {\n              path.push(\"[\" + JSON.stringify(e[0]) + \"]\");\n              recurse(e[1]);\n              path.pop();\n            });\n          } else if (x.t === \"str\") {\n            sequence_crdt_generate_braid(x.S, version, is_anc).forEach((s) => {\n              patches.push({\n                range: `${path.join(\"\")}[${s[0]}:${s[0] + s[1]}]`,\n                content: s[2],\n              });\n              if (s[3]) sort_keys[patches.length - 1] = s[3];\n            });\n          }\n        }\n  \n        return {\n          version,\n          parents: { ...self.T[version] },\n          patches,\n          sort_keys,\n        };\n      }\n    };\n  \n    /// # json_crdt.apply_bubbles(to_bubble)\n    ///\n    /// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are \"bubbles\", each bubble is represented with an array of two elements, the first element is the \"bottom\" of the bubble, and the second element is the \"top\" of the bubble. 
We will use the \"bottom\" as the new name for the version, and we'll use the \"top\" as the new parents.\n    ///\n    /// ``` js \n    /// json_crdt.apply_bubbles({\n    ///   alice4: ['bob5', 'alice4'], \n    ///   bob5: ['bob5', 'alice4']\n    /// }) \n    /// ```\n    self.apply_bubbles = (to_bubble) => {\n      function recurse(x) {\n        if (is_lit(x)) return x;\n        if (x.t == \"val\") {\n          sequence_crdt_apply_bubbles(x.S, to_bubble);\n          sequence_crdt_traverse(\n            x.S,\n            () => true,\n            (node) => {\n              node.elems = node.elems.slice(0, 1).map(recurse);\n            },\n            true\n          );\n          if (\n            x.S.nexts.length == 0 &&\n            !x.S.next &&\n            x.S.elems.length == 1 &&\n            is_lit(x.S.elems[0])\n          )\n            return x.S.elems[0];\n          return x;\n        }\n        if (x.t == \"arr\") {\n          sequence_crdt_apply_bubbles(x.S, to_bubble);\n          sequence_crdt_traverse(\n            x.S,\n            () => true,\n            (node) => {\n              node.elems = node.elems.map(recurse);\n            },\n            true\n          );\n          if (\n            x.S.nexts.length == 0 &&\n            !x.S.next &&\n            x.S.elems.every(is_lit) &&\n            !Object.keys(x.S.deleted_by).length\n          )\n            return { t: \"lit\", S: x.S.elems.map(get_lit) };\n          return x;\n        }\n        if (x.t == \"obj\") {\n          Object.entries(x.S).forEach((e) => {\n            let y = (x.S[e[0]] = recurse(e[1]));\n            if (y == null) delete x.S[e[0]];\n          });\n          if (Object.values(x.S).every(is_lit)) {\n            let o = {};\n            Object.entries(x.S).forEach((e) => (o[e[0]] = get_lit(e[1])));\n            return { t: \"lit\", S: o };\n          }\n          return x;\n        }\n        if (x.t == \"str\") {\n          sequence_crdt_apply_bubbles(x.S, to_bubble);\n      
    if (\n            x.S.nexts.length == 0 &&\n            !x.S.next &&\n            !Object.keys(x.S.deleted_by).length\n          )\n            return x.S.elems;\n          return x;\n        }\n      }\n      self.S = recurse(self.S);\n  \n      Object.entries(to_bubble).forEach(([version, bubble]) => {\n        if (!self.T[version]) return;\n  \n        self.my_where_are_they_now[version] = bubble[0];\n  \n        if (version === bubble[1]) self.T[bubble[0]] = self.T[bubble[1]];\n  \n        if (version !== bubble[0]) {\n          if (self.root_version == version) self.root_version = bubble[0];\n          delete self.T[version];\n          delete self.version_cache[version];\n          delete self.acked_boundary[version];\n          delete self.current_version[version];\n          if (\n            self.version_groups[version] &&\n            self.version_groups[version][0] == version\n          ) {\n            for (let v of self.version_groups[version]) {\n              delete self.version_groups[v];\n            }\n          }\n          for (let [k, parents] of Object.entries(self.T)) {\n            self.T[k] = parents = { ...parents };\n            for (let p of Object.keys(parents)) {\n              if (p == version) delete parents[p];\n            }\n          }\n        } else self.version_cache[version] = null;\n      });\n  \n      let leaves = Object.keys(self.current_version);\n      let acked_boundary = Object.keys(self.acked_boundary);\n      let fiss = Object.keys(self.fissures);\n      if (\n        leaves.length == 1 &&\n        acked_boundary.length == 1 &&\n        leaves[0] == acked_boundary[0] &&\n        fiss.length == 0\n      ) {\n        self.T = { [leaves[0]]: {} };\n        self.S = make_lit(self.read());\n      }\n    };\n  \n    /// # json_crdt.add_version(version, parents, patches[, sort_keys])\n    ///\n    /// The main method for modifying a `json_crdt` data structure. 
\n    ///\n    /// * `version`: Unique string associated with this edit. \n    /// * `parents`: A set of versions that this version is aware of, represented as a map with versions as keys, and values of `true`. \n    /// * `patches`: An array of patches, each patch looks like this `{range: '.life.meaning', content: 42}`. \n    /// * `sort_keys`: (optional) An object where each key is an index, and the value is a sort_key to use with the patch at the given index in the `patches` array – a sort_key overrides the version for a patch for the purposes of sorting. This can be useful after doing some pruning. \n    ///\n    /// ``` js\n    /// json_crdt.add_version(\n    ///   'alice6', \n    ///   {\n    ///     alice5: true, \n    ///     bob7: true\n    ///   }, \n    ///   [\n    ///     {\n    ///       range: '.a.b', \n    ///       content: 'c'\n    ///     }\n    ///   ]\n    /// )\n    /// ``` \n    self.add_version = (version, parents, patches, sort_keys) => {\n      if (self.T[version]) return;\n  \n      if (self.root_version == null) self.root_version = version;\n  \n      self.T[version] = { ...parents };\n  \n      self.version_cache[version] = JSON.parse(\n        JSON.stringify({\n          version,\n          parents,\n          patches,\n          sort_keys,\n        })\n      );\n  \n      Object.keys(parents).forEach((k) => {\n        if (self.current_version[k]) delete self.current_version[k];\n      });\n      self.current_version[version] = true;\n  \n      if (!sort_keys) sort_keys = {};\n  \n      if (!Object.keys(parents).length) {\n        let parse = self.parse_patch(patches[0]);\n        self.S = make_lit(parse.value);\n        return patches;\n      }\n  \n      let is_anc;\n      if (parents == self.current_version) {\n        is_anc = (_version) => _version != version;\n      } else {\n        let ancs = self.ancestors(parents);\n        is_anc = (_version) => ancs[_version];\n      }\n  \n      let rebased_patches = [];\n      
patches.forEach((patch, i) => {\n        let sort_key = sort_keys[i];\n        let parse = self.parse_patch(patch);\n        let cur = resolve_path(parse);\n        if (!parse.slice) {\n          if (cur.t != \"val\") throw Error(\"bad\");\n          let len = sequence_crdt_length(cur.S, is_anc);\n          sequence_crdt_add_version(\n            cur.S,\n            version,\n            [[0, len, [parse.delete ? null : make_lit(parse.value)], sort_key]],\n            is_anc\n          );\n          rebased_patches.push(patch);\n        } else {\n          if (typeof parse.value === \"string\" && cur.t !== \"str\")\n            throw Error(\n              `Cannot splice string ${JSON.stringify(\n                parse.value\n              )} into non-string`\n            );\n          if (parse.value instanceof Array && cur.t !== \"arr\")\n            throw Error(\n              `Cannot splice array ${JSON.stringify(\n                parse.value\n              )} into non-array`\n            );\n          if (parse.value instanceof Array)\n            parse.value = parse.value.map((x) => make_lit(x));\n  \n          let r0 = parse.slice[0];\n          let r1 = parse.slice[1];\n          if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {\n            let len = sequence_crdt_length(cur.S, is_anc);\n            if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0;\n            if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1;\n          }\n  \n          let rebased_splices = sequence_crdt_add_version(\n            cur.S,\n            version,\n            [[r0, r1 - r0, parse.value, sort_key]],\n            is_anc\n          );\n          for (let rebased_splice of rebased_splices)\n            rebased_patches.push({\n              range: `${parse.path\n                .map((x) => `[${JSON.stringify(x)}]`)\n                .join(\"\")}[${rebased_splice[0]}:${rebased_splice[0] + rebased_splice[1]\n                }]`,\n              content: rebased_splice[2],\n  
          });\n        }\n      });\n  \n      function resolve_path(parse) {\n        let cur = self.S;\n        if (!cur || typeof cur != \"object\" || cur.t == \"lit\")\n          cur = self.S = {\n            t: \"val\",\n            S: sequence_crdt_create_node(self.root_version, [cur]),\n          };\n        let prev_S = null;\n        let prev_i = 0;\n        for (let i = 0; i < parse.path.length; i++) {\n          let key = parse.path[i];\n          if (cur.t == \"val\")\n            cur = sequence_crdt_get((prev_S = cur.S), (prev_i = 0), is_anc);\n          if (cur.t == \"lit\") {\n            let new_cur = {};\n            if (cur.S instanceof Array) {\n              new_cur.t = \"arr\";\n              new_cur.S = sequence_crdt_create_node(\n                self.root_version,\n                cur.S.map((x) => make_lit(x))\n              );\n            } else {\n              if (typeof cur.S != \"object\") throw Error(\"bad\");\n              new_cur.t = \"obj\";\n              new_cur.S = {};\n              Object.entries(cur.S).forEach(\n                (e) => (new_cur.S[e[0]] = make_lit(e[1]))\n              );\n            }\n            cur = new_cur;\n            sequence_crdt_set(prev_S, prev_i, cur, is_anc);\n          }\n          if (cur.t == \"obj\") {\n            let x = cur.S[key];\n            if (!x || typeof x != \"object\" || x.t == \"lit\")\n              x = cur.S[key] = {\n                t: \"val\",\n                S: sequence_crdt_create_node(self.root_version, [\n                  x == null ? null : x,\n                ]),\n              };\n            cur = x;\n          } else if (i == parse.path.length - 1 && !parse.slice) {\n            parse.slice = [key, key + 1];\n            parse.value = cur.t == \"str\" ? 
parse.value : [parse.value];\n          } else if (cur.t == \"arr\") {\n            cur = sequence_crdt_get((prev_S = cur.S), (prev_i = key), is_anc);\n          } else throw Error(\"bad\");\n        }\n        if (parse.slice) {\n          if (cur.t == \"val\")\n            cur = sequence_crdt_get((prev_S = cur.S), (prev_i = 0), is_anc);\n          if (typeof cur == \"string\") {\n            cur = {\n              t: \"str\",\n              S: sequence_crdt_create_node(self.root_version, cur),\n            };\n            sequence_crdt_set(prev_S, prev_i, cur, is_anc);\n          } else if (cur.t == \"lit\") {\n            if (!(cur.S instanceof Array)) throw Error(\"bad\");\n            cur = {\n              t: \"arr\",\n              S: sequence_crdt_create_node(\n                self.root_version,\n                cur.S.map((x) => make_lit(x))\n              ),\n            };\n            sequence_crdt_set(prev_S, prev_i, cur, is_anc);\n          }\n        }\n        return cur;\n      }\n  \n      return rebased_patches;\n    };\n  \n    /// # json_crdt.get_child_map()\n    ///\n    /// Returns a map where each key is a version, and each value is a set of child versions, represented as a map with versions as keys, and values of `true`.\n    ///\n    /// ``` js\n    /// json_crdt.get_child_map()\n    /// ``` \n    self.get_child_map = () => {\n      let children = {};\n      Object.entries(self.T).forEach(([v, parents]) => {\n        Object.keys(parents).forEach((parent) => {\n          if (!children[parent]) children[parent] = {};\n          children[parent][v] = true;\n        });\n      });\n      return children;\n    };\n  \n    /// # json_crdt.ancestors(versions, ignore_nonexistent=false)\n    ///\n    /// Gather `versions` and all their ancestors into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. 
If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.ancestors({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.ancestors = (versions, ignore_nonexistent) => {\n      let result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(self.T[version]).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n  \n    /// # json_crdt.descendants(versions, ignore_nonexistent=false)\n    ///\n    /// Gather `versions` and all their descendants into a set. `versions` is a set of versions, i.e. a map with version-keys and values of true – we'll basically return a larger set. If `ignore_nonexistent` is `true`, then we won't throw an exception if we encounter a version that we don't have in our data-structure.\n    ///\n    /// ``` js\n    /// json_crdt.descendants({\n    ///   alice12: true, \n    ///   bob10: true\n    /// }) \n    /// ``` \n    self.descendants = (versions, ignore_nonexistent) => {\n      let children = self.get_child_map();\n      let result = {};\n      function recurse(version) {\n        if (result[version]) return;\n        if (!self.T[version]) {\n          if (ignore_nonexistent) return;\n          throw Error(`The version ${version} no existo`);\n        }\n        result[version] = true;\n        Object.keys(children[version] || {}).forEach(recurse);\n      }\n      Object.keys(versions).forEach(recurse);\n      return result;\n    };\n  \n    /// # json_crdt.get_leaves(versions)\n    ///\n    /// Returns a set of versions from `versions` which don't also have a child in `versions`. 
`versions` is itself a set of versions, represented as an object with version keys and `true` values, and the return value is represented the same way.\n    self.get_leaves = (versions) => {\n      let leaves = { ...versions };\n      Object.keys(versions).forEach((v) => {\n        Object.keys(self.T[v]).forEach((p) => delete leaves[p]);\n      });\n      return leaves;\n    };\n  \n    /// # json_crdt.parse_patch(patch)\n    ///\n    /// Takes a patch in the form `{range, content}`, and returns an object of the form `{path: [...], [slice: [...]], [delete: true], content}`; basically calling `parse_json_path` on `patch.range`, and adding `patch.content` along for the ride.\n    self.parse_patch = (patch) => {\n      let x = self.parse_json_path(patch.range);\n      x.value = patch.content;\n      return x;\n    };\n  \n    /// # json_crdt.parse_json_path(json_path)\n    ///\n    /// Parses the string `json_path` into an object like: `{path: [...], [slice: [...]], [delete: true]}`. \n    ///\n    /// * `a.b[3]` --> `{path: ['a', 'b', 3]}`\n    /// * `a.b[3:5]` --> `{path: ['a', 'b'], slice: [3, 5]}`\n    /// * `delete a.b` --> `{path: ['a', 'b'], delete: true}`\n    ///\n    /// ``` js\n    /// console.log(json_crdt.parse_json_path('a.b.c'))\n    /// ```\n    self.parse_json_path = (json_path) => {\n      let ret = { path: [] };\n      let re =\n        /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|\"(\\\\\"|[^\"])*\")\\]/g;\n      let m;\n      while ((m = re.exec(json_path))) {\n        if (m[1]) ret.delete = true;\n        else if (m[2]) ret.path.push(m[2]);\n        else if (m[3] && m[5])\n          ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))];\n        else if (m[3]) ret.path.push(JSON.parse(m[3]));\n      }\n      return ret;\n    };\n  \n    return self;\n  };\n  \n\n"
  },
  {
    "path": "antimatter_ts/src/sequence_crdt.ts",
    "content": "type Version = string;\n\ntype Node = {\n  /// globally unique string\n  version: Version,\n  /// a string or array representing actual data elements of the underlying sequence\n  elems: string | any[],\n  /// this is useful for dealing with replace operations\n  end_cap: any | undefined,\n  /// version to pretend this is for the purposes of sorting\n  sort_key: any | undefined,\n  /// if this node gets deleted, we'll mark it here\n  deleted_by: Record<string, any>,\n  /// array of nodes following this one\n  nexts: any[],\n  /// final node following this one (after all the nexts)\n  next: null | any,\n};\n\n/// # sequence_crdt_create_node(version, elems, [end_cap, sort_key])\n///\n/// Creates a node for a `sequence_crdt` sequence CRDT with the given properties. The resulting node will look like this:\n///\n/// let sequence_node = sequence_crdt_create_node('alice1', 'hello')\n/// ```\nconst sequence_crdt_create_node = (version: Version, elems: string | any[], end_cap: any = undefined, sort_key: any = undefined): Node => ({\n  version,\n  elems,\n  end_cap,\n  sort_key,\n  deleted_by: {},\n  nexts: [],\n  next: null,\n});\n\n/// # sequence_crdt_generate_braid(root_node, version, is_anc)\n///  \n/// Reconstructs an array of splice-information which can be passed to `sequence_crdt_add_version` in order to add `version` to another `sequence_crdt` instance – the returned array looks like: `[[insert_pos, delete_count, insert_elems, sort_key], ...]`. `is_anc` is a function which accepts a version string and returns `true` if and only if the given version is an ancestor of `version` (i.e. 
a version which the author of `version` knew about when they created that version).\n///\n/// ``` js\n/// let root_node = sequence_crdt_create_node('alice1', 'hello')\n/// console.log(sequence_crdt_generate_braid(root_node, 'alice1', x => false)) // outputs [0, 0, \"hello\"]\n/// ```\nconst sequence_crdt_generate_braid = (S: Node, version: Version, is_anc: (v: Version) => boolean, read_array_elements: (<T>(x: T, cb?: () => boolean) => T) | undefined = undefined) => {\n  if (!read_array_elements) read_array_elements = (x) => x;\n  let splices = [];\n\n  function add_ins(offset, ins, sort_key, end_cap, is_row_header) {\n    if (typeof ins !== \"string\")\n      ins = ins.map((x) => read_array_elements(x, () => false));\n    if (splices.length > 0) {\n      let prev = splices[splices.length - 1];\n      if (\n        prev[0] + prev[1] === offset &&\n        !end_cap &&\n        (!is_row_header || prev[3] == sort_key) &&\n        (prev[4] === \"i\" || (prev[4] === \"r\" && prev[1] === 0))\n      ) {\n        prev[2] = prev[2].concat(ins);\n        return;\n      }\n    }\n    splices.push([offset, 0, ins, sort_key, end_cap ? 
\"r\" : \"i\"]);\n  }\n\n  function add_del(offset, del, ins) {\n    if (splices.length > 0) {\n      let prev = splices[splices.length - 1];\n      if (prev[0] + prev[1] === offset && prev[4] !== \"i\") {\n        prev[1] += del;\n        return;\n      }\n    }\n    splices.push([offset, del, ins, null, \"d\"]);\n  }\n\n  let offset = 0;\n  function helper(node, _version, end_cap = undefined, is_row_header = undefined) {\n    if (_version === version) {\n      add_ins(\n        offset,\n        node.elems.slice(0),\n        node.sort_key,\n        end_cap,\n        is_row_header\n      );\n    } else if (node.deleted_by[version] && node.elems.length > 0) {\n      add_del(offset, node.elems.length, node.elems.slice(0, 0));\n    }\n\n    if (\n      (!_version || is_anc(_version)) &&\n      !Object.keys(node.deleted_by).some(is_anc)\n    ) {\n      offset += node.elems.length;\n    }\n\n    node.nexts.forEach((next) =>\n      helper(next, next.version, node.end_cap, true)\n    );\n    if (node.next) helper(node.next, _version);\n  }\n  helper(S, null);\n  splices.forEach((s) => {\n    // if we have replaces with 0 deletes,\n    // make them have at least 1 delete..\n    // this can happen when there are multiple replaces of the same text,\n    // and our code above will associate those deletes with only one of them\n    if (s[4] === \"r\" && s[1] === 0) s[1] = 1;\n  });\n  return splices;\n};\n\n/// # sequence_crdt_apply_bubbles(root_node, to_bubble)\n///\n/// This method helps prune away meta data and compress stuff when we have determined that certain versions can be renamed to other versions – these renamings are expressed in `to_bubble`, where keys are versions and values are \"bubbles\", each bubble is represented with an array of two elements, the first element is the \"bottom\" of the bubble, and the second element is the \"top\" of the bubble. 
We will use the \"bottom\" as the new name for the version, and we'll use the \"top\" as the new parents.\n/// \n/// ``` js\n/// sequence_crdt_apply_bubbles(root_node, {\n///   alice4: ['bob5', 'alice4'],\n///   bob5: ['bob5', 'alice4']\n/// })\n/// ```\nconst sequence_crdt_apply_bubbles = (S, to_bubble) => {\n  sequence_crdt_traverse(\n    S,\n    () => true,\n    (node) => {\n      if (\n        to_bubble[node.version] &&\n        to_bubble[node.version][0] != node.version\n      ) {\n        if (!node.sort_key) node.sort_key = node.version;\n        node.version = to_bubble[node.version][0];\n      }\n\n      for (let x of Object.keys(node.deleted_by)) {\n        if (to_bubble[x]) {\n          delete node.deleted_by[x];\n          node.deleted_by[to_bubble[x][0]] = true;\n        }\n      }\n    },\n    true\n  );\n\n  function set_nnnext(node, next) {\n    while (node.next) node = node.next;\n    node.next = next;\n  }\n\n  do_line(S, S.version);\n  function do_line(node, version) {\n    let prev = null;\n    while (node) {\n      if (node.nexts[0] && node.nexts[0].version == version) {\n        for (let i = 0; i < node.nexts.length; i++) {\n          delete node.nexts[i].version;\n          delete node.nexts[i].sort_key;\n          set_nnnext(\n            node.nexts[i],\n            i + 1 < node.nexts.length ? 
node.nexts[i + 1] : node.next\n          );\n        }\n        node.next = node.nexts[0];\n        node.nexts = [];\n      }\n\n      if (node.deleted_by[version]) {\n        node.elems = node.elems.slice(0, 0);\n        node.deleted_by = {};\n        if (prev) {\n          node = prev;\n          continue;\n        }\n      }\n\n      let next = node.next;\n\n      if (\n        !node.nexts.length &&\n        next &&\n        (!node.elems.length ||\n          !next.elems.length ||\n          (Object.keys(node.deleted_by).every((x) => next.deleted_by[x]) &&\n            Object.keys(next.deleted_by).every((x) => node.deleted_by[x])))\n      ) {\n        if (!node.elems.length) node.deleted_by = next.deleted_by;\n        node.elems = node.elems.concat(next.elems);\n        node.end_cap = next.end_cap;\n        node.nexts = next.nexts;\n        node.next = next.next;\n        continue;\n      }\n\n      if (next && !next.elems.length && !next.nexts.length) {\n        node.next = next.next;\n        continue;\n      }\n\n      for (let n of node.nexts) do_line(n, n.version);\n\n      prev = node;\n      node = next;\n    }\n  }\n};\n\n/// # sequence_crdt_get(root_node, i, is_anc)\n/// \n/// Returns the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.\n/// \n/// ``` js\n/// let x = sequence_crdt_get(root_node, 2, {\n///     alice1: true\n/// })\n/// ```\nconst sequence_crdt_get = (S, i, is_anc) => {\n  let ret = null;\n  let offset = 0;\n  sequence_crdt_traverse(S, is_anc ? 
is_anc : () => true, (node) => {\n    if (i - offset < node.elems.length) {\n      ret = node.elems[i - offset];\n      return false;\n    }\n    offset += node.elems.length;\n  });\n  return ret;\n};\n\n/// # sequence_crdt_set(root_node, i, v, is_anc)\n/// \n/// Sets the element at the `i`th position (0-based) in the `sequence_crdt` rooted at `root_node` to the value `v`, when only considering versions which result in `true` when passed to `is_anc`.\n/// \n/// ``` js\n/// sequence_crdt_set(root_node, 2, 'x', {\n///   alice1: true\n/// })\n/// ```\nconst sequence_crdt_set = (S, i, v, is_anc) => {\n  let offset = 0;\n  sequence_crdt_traverse(S, is_anc ? is_anc : () => true, (node) => {\n    if (i - offset < node.elems.length) {\n      if (typeof node.elems == \"string\")\n        node.elems =\n          node.elems.slice(0, i - offset) +\n          v +\n          node.elems.slice(i - offset + 1);\n      else node.elems[i - offset] = v;\n      return false;\n    }\n    offset += node.elems.length;\n  });\n};\n\n/// # sequence_crdt_length(root_node, is_anc)\n/// \n/// Returns the length of the `sequence_crdt` rooted at `root_node`, when only considering versions which result in `true` when passed to `is_anc`.\n/// \n/// ``` js\n/// console.log(sequence_crdt_length(root_node, {\n///  alice1: true\n/// }))\n/// ```\nconst sequence_crdt_length = (S, is_anc) => {\n  let count = 0;\n  sequence_crdt_traverse(S, is_anc ? is_anc : () => true, (node) => {\n    count += node.elems.length;\n  });\n  return count;\n};\n\n/// # sequence_crdt_break_node(node, break_position, end_cap, new_next)\n/// \n/// This method breaks apart a `sequence_crdt` node into two nodes, each representing a subsequence of the sequence represented by the original node. The `node` parameter is modified into the first node, and the second node is returned. The first node represents the elements of the sequence before `break_position`, and the second node represents the rest of the elements. 
If `end_cap` is truthy, then the first node will have `end_cap` set – this is generally done if the elements in the second node are being replaced. This method will add `new_next` to the first node's `nexts` array.\n/// \n/// ``` js\n/// let node = sequence_crdt_create_node('alice1', 'hello') // node.elems == 'hello'\n/// let second = sequence_crdt_break_node(node, 2) // now node.elems == 'he', and second.elems == 'llo'\n/// ```\nconst sequence_crdt_break_node = (node, x, end_cap = undefined, new_next = undefined) => {\n  let tail = sequence_crdt_create_node(\n    null,\n    node.elems.slice(x),\n    node.end_cap\n  );\n  Object.assign(tail.deleted_by, node.deleted_by);\n  tail.nexts = node.nexts;\n  tail.next = node.next;\n\n  node.elems = node.elems.slice(0, x);\n  node.end_cap = end_cap;\n  node.nexts = new_next ? [new_next] : [];\n  node.next = tail;\n\n  return tail;\n};\n\n/// # sequence_crdt_add_version(root_node, version, splices, [is_anc])\n/// \n/// This is the main method in sequence_crdt, used to modify the sequence. The modification must be given a unique `version` string, and the modification itself is represented as an array of `splices`, where each splice looks like this: `[position, num_elements_to_delete, elements_to_insert, optional_sort_key]`. \n/// \n/// Note that all positions are relative to the original sequence, before any splices have been applied. Positions are counted by only considering nodes with versions which result in `true` when passed to `is_anc`. 
(and are not `deleted_by` any versions which return `true` when passed to `is_anc`).\n/// \n/// ``` js\n/// let node = sequence_crdt_create_node('alice1', 'hello') \n/// sequence_crdt_add_version(node, 'alice2', [[5, 0, ' world']], null, v => v == 'alice1') \n/// ```\nconst sequence_crdt_add_version = (S: Node, version: Version, splices, is_anc) => {\n  let rebased_splices = [];\n\n  function add_to_nexts(nexts: Node[], to: Node) {\n    let i = binarySearch(nexts, function (x: Node) {\n      if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1;\n      if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1;\n      return 0;\n    });\n    nexts.splice(i, 0, to);\n  }\n\n  let si = 0;\n  let delete_up_to = 0;\n\n  let process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n    let s = splices[si];\n    if (!s) return;\n    let sort_key = s[3];\n\n    if (deleted) {\n      if (s[1] == 0 && s[0] == offset) {\n        if (node.elems.length == 0 && !node.end_cap && has_nexts) return;\n        let new_node = sequence_crdt_create_node(\n          version,\n          s[2],\n          null,\n          sort_key\n        );\n\n        fresh_nodes.add(new_node);\n\n        if (node.elems.length == 0 && !node.end_cap)\n          add_to_nexts(node.nexts, new_node);\n        else sequence_crdt_break_node(node, 0, undefined, new_node);\n        si++;\n      }\n\n      if (\n        delete_up_to <= offset &&\n        s[1] &&\n        s[2] &&\n        s[0] == offset &&\n        node.end_cap &&\n        !has_nexts &&\n        (node.next && node.next.elems.length) &&\n        !Object.keys(node.next.deleted_by).some((version) => f(version))\n      ) {\n        delete_up_to = s[0] + s[1];\n\n        let new_node = sequence_crdt_create_node(\n          version,\n          s[2],\n          null,\n          sort_key\n        );\n\n        fresh_nodes.add(new_node);\n\n        add_to_nexts(node.nexts, new_node);\n      }\n\n      return;\n    
}\n\n    if (s[1] == 0) {\n      let d = s[0] - (offset + node.elems.length);\n      if (d > 0) return;\n      if (d == 0 && !node.end_cap && has_nexts) return;\n      let new_node = sequence_crdt_create_node(version, s[2], null, sort_key);\n\n      fresh_nodes.add(new_node);\n\n      if (d == 0 && !node.end_cap) {\n        add_to_nexts(node.nexts, new_node);\n      } else {\n        sequence_crdt_break_node(node, s[0] - offset, undefined, new_node);\n      }\n      si++;\n      return;\n    }\n\n    if (delete_up_to <= offset) {\n      let d = s[0] - (offset + node.elems.length);\n\n      let add_at_end =\n        d == 0 &&\n        s[2] &&\n        node.end_cap &&\n        !has_nexts &&\n        (node.next && node.next.elems.length) &&\n        !Object.keys(node.next.deleted_by).some((version) => f(version));\n\n      if (d > 0 || (d == 0 && !add_at_end)) return;\n\n      delete_up_to = s[0] + s[1];\n\n      if (s[2]) {\n        let new_node = sequence_crdt_create_node(\n          version,\n          s[2],\n          null,\n          sort_key\n        );\n\n        fresh_nodes.add(new_node);\n\n        if (add_at_end) {\n          add_to_nexts(node.nexts, new_node);\n        } else {\n          sequence_crdt_break_node(node, s[0] - offset, true, new_node);\n        }\n        return;\n      } else {\n        if (s[0] == offset) {\n        } else {\n          sequence_crdt_break_node(node, s[0] - offset);\n          return;\n        }\n      }\n    }\n\n    if (delete_up_to > offset) {\n      if (delete_up_to <= offset + node.elems.length) {\n        if (delete_up_to < offset + node.elems.length) {\n          sequence_crdt_break_node(node, delete_up_to - offset);\n        }\n        si++;\n      }\n      node.deleted_by[version] = true;\n      return;\n    }\n  };\n\n  let f = is_anc || (() => true);\n  let offset = 0;\n  let rebase_offset = 0;\n  let fresh_nodes = new Set();\n  function traverse(node, prev, version) {\n    if (!version || f(version)) {\n      let 
has_nexts = node.nexts.find((next) => f(next.version));\n      let deleted = Object.keys(node.deleted_by).some((version) =>\n        f(version)\n      );\n      let rebase_deleted = Object.keys(node.deleted_by).length;\n      process_patch(node, offset, has_nexts, prev, version, deleted);\n\n      if (!deleted) offset += node.elems.length;\n      if (!rebase_deleted && Object.keys(node.deleted_by).length)\n        rebased_splices.push([rebase_offset, node.elems.length, \"\"]);\n    }\n    if (fresh_nodes.has(node))\n      rebased_splices.push([rebase_offset, 0, node.elems]);\n    if (!Object.keys(node.deleted_by).length)\n      rebase_offset += node.elems.length;\n\n    for (let next of node.nexts) traverse(next, null, next.version);\n    if (node.next) traverse(node.next, node, version);\n  }\n  traverse(S, null, S.version);\n\n  return rebased_splices;\n};\n\n/// # sequence_crdt_traverse(root_node, is_anc, callback, [view_deleted, tail_callback])\n/// \n/// Traverses the subset of nodes in the tree rooted at `root_node` whose versions return `true` when passed to `is_anc`. For each node, `callback` is called with these parameters: `node, offset, has_nexts, prev, version, deleted`, \n/// \n/// Where\n/// - `node` is the current node being traversed\n/// - `offset` says how many elements we have passed so far \n/// - `has_nexts` is true if some of this node's `nexts` will be traversed according to `is_anc`\n/// - `prev` is a pointer to the node whos `next` points to this one, or `null` if this is the root node\n/// - `version` is the version of this node, or this node's `prev` if our version is `null`, or that node's `prev` if it is also `null`, etc\n/// - `deleted` is true if this node is deleted according to `is_anc`\n/// \n/// Usually we skip deleted nodes when traversing, but we'll include them if `view_deleted` is `true`. 
\n/// \n/// `tail_callback` is an optional callback that will get called with a single parameter `node` after all of that node's children `nexts` and `next` have been traversed. \n/// \n/// ``` js\n/// sequence_crdt_traverse(node, () => true, node =>\n///   process.stdout.write(node.elems)) \n/// ```\nconst sequence_crdt_traverse = (S, f, cb, view_deleted = undefined, tail_cb = undefined) => {\n  let offset = 0;\n  function helper(node, prev, version) {\n    let has_nexts = node.nexts.find((next) => f(next.version));\n    let deleted = Object.keys(node.deleted_by).some((version) => f(version));\n    if (view_deleted || !deleted) {\n      if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n        return true;\n      offset += node.elems.length;\n    }\n    for (let next of node.nexts)\n      if (f(next.version)) {\n        if (helper(next, null, next.version)) return true;\n      }\n    if (node.next) {\n      if (helper(node.next, node, version)) return true;\n    } else if (tail_cb) tail_cb(node);\n  }\n  helper(S, null, S.version);\n};\n\n// modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\nfunction binarySearch<T>(ar: T[], compare_fn: (x: T) => number): number {\n  let m = 0;\n  let n = ar.length - 1;\n  while (m <= n) {\n    let k = (n + m) >> 1;\n    let cmp = compare_fn(ar[k]);\n    if (cmp > 0) {\n      m = k + 1;\n    } else if (cmp < 0) {\n      n = k - 1;\n    } else {\n      return k;\n    }\n  }\n  return m;\n}\n\n/// - *sequence_crdt*: methods to manipulate a pruneable sequence CRDT —\n///   \"sequence\" meaning it represents a javascript string or array, \"CRDT\" meaning\n///   this structure can be merged with other ones, and \"pruneable\" meaning that it\n///   supports an operation to remove meta-data when it is no longer needed (whereas\n///   CRDT's often keep track of this meta-data forever).\nexport {\n  sequence_crdt_create_node as create_node,\n  sequence_crdt_generate_braid as 
generate_braid,\n  sequence_crdt_apply_bubbles as apply_bubbles,\n  sequence_crdt_get as get,\n  sequence_crdt_set as set,\n  sequence_crdt_length as length,\n  sequence_crdt_break_node as break_node,\n  sequence_crdt_add_version as add_version,\n  sequence_crdt_traverse as traverse,\n};\n"
  },
  {
    "path": "antimatter_ts/test.html",
    "content": "<body></body>\n<script>\n\nlet real_random = Math.random\n\nfunction print(...args) {\n    let d = document.createElement('div')\n    let angle = real_random() * 360\n    d.style.background = `hsl(${angle},100%,${args[0]?.startsWith?.('i = ') ? 85 : 95}%)`\n    d.style.border = `3px solid hsl(${angle},100%,85%)`\n    d.style.display = 'grid'\n    d.style['grid-template-columns'] = '1fr '.repeat(args.length)\n    for (let a of args) {\n        if (typeof a == 'string') {\n            let dd = document.createElement('div')\n            dd.textContent = a\n            d.append(dd)\n        } else {\n            let dd = document.createElement('pre')\n            dd.style.fontSize = '50%'\n            dd.textContent = JSON.stringify(a, null, '    ')\n            d.append(dd)\n        }\n    }\n    document.body.append(d)\n}\n\nconsole.log = print\n\n</script>\n<script src=\"antimatter.js\"></script>\n<script src=\"random002.js\"></script>\n\n<script>\n\n;(async () => {\n\n    let best_seed = null\n    let best_n = Infinity\n    let last_n\n\n    for (let i = 0; i < 100; i++) {\n        let seed = 'BASE_' + i\n        let r = run_test(seed, false)\n        if (!r) {\n            console.log(`seed \"${seed}\" FAILED after ${last_n} steps`)\n            if (last_n < best_n) {\n                best_n = last_n\n                best_seed = seed\n            }\n        } else {\n            console.log(`seed \"${seed}\" passed after ${last_n} steps!`)\n        }\n\n        if (best_seed != null) {\n            console.log(`    (smallest failed seed: \"${best_seed}\", after ${best_n} steps)`)\n        }\n\n        await new Promise(done => setTimeout(done, 10))\n        document.body.scrollTop = document.body.scrollHeight\n    }\n    if (best_seed == null) console.log(`ALL PASSED!`)\n    document.body.scrollTop = document.body.scrollHeight\n\n    function run_test(seed, verbose) {\n        try {\n        Math.randomSeed(seed)\n\n        let num_peers = 
Math.floor(Math.random() * 5) + 1\n        let steps = Math.floor(Math.random() * 200)\n\n        last_n = 0\n\n        let peers = []\n        let conns = {}\n        let next_conn_id = 0\n\n        for (let i = 0; i < num_peers; i++) {\n            peers.push(antimatter.create(msg => {\n                let c = conns[msg.conn]\n\n                if (c?.[i]?.other == null) {\n                    debugger\n                }\n\n                if (verbose) console.log(`    send p${i}->p${c?.[i]?.other}(conn:${msg.conn}) msg:${msg.cmd}`)\n\n                c?.[c?.[i]?.other]?.q.push(msg)\n            }, {id: i}))\n        }\n\n        peers[0].set({range: '', content: ''})\n\n        for (let i = 0; i < steps; i++) {\n            if (verbose) console.log(`i = ${i}`)\n\n            last_n++\n\n            if (Math.random() < 1/3) {\n                // edit\n\n                let can_do = peers.filter(p => p.read() != null || p.id == 0)\n                let p = can_do[Math.floor(Math.random() * can_do.length)]\n                let text = p.read()\n\n                let start = Math.round(Math.random() * text.length)\n                let end = start + Math.round(Math.random() * (text.length - start))\n                let content = String.fromCharCode('a'.charCodeAt(0) + Math.floor(Math.random() * 26)).repeat(Math.floor(Math.random() * 4))\n\n                if (verbose) console.log(`edit p${p.id} [${start}:${end}]=${content}`)\n\n                let v = p.set({range: `[${start}:${end}]`, content})\n            } else if (Math.random() < 0.5) {\n                if (Math.random() < 0.5) {\n                    // connect\n\n                    if (peers.length > 1) {\n                        let p1 = peers[Math.floor(Math.random() * peers.length)]\n                        let p2 = p1\n                        while (p2 == p1) p2 = peers[Math.floor(Math.random() * peers.length)]\n\n                        let conn = next_conn_id++\n                        conns[conn] = {\n    
                        [p1.id]: {other: p2.id, q: []},\n                            [p2.id]: {other: p1.id, q: []}\n                        }\n\n                        if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)\n\n                        p1.get(conn)\n                    }\n                } else {\n                    // disconnect\n\n                    let conn_keys = Object.keys(conns)\n                    if (conn_keys.length) {\n                        let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]\n                        let c = conns[conn]\n                        let peer_keys = Object.keys(c)\n                        let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]\n                        let other = c[p.id].other\n\n                        if (peer_keys.length == 1) delete conns[conn]\n                        else delete c[p.id]\n\n                        if (verbose) console.log(`diss p${p.id} (conn:${conn}, ${other})`)\n\n                        if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)\n                    }\n                }\n            } else {\n                // message pump\n\n                let conn_keys = Object.keys(conns)\n                if (conn_keys.length) {\n                    let conn = conn_keys[Math.floor(Math.random() * conn_keys.length)]\n                    let c = conns[conn]\n                    let peer_keys = Object.keys(c)\n                    let p = peers[peer_keys[Math.floor(Math.random() * peer_keys.length)]]\n\n                    let msg = c[p.id].q.shift()\n\n                    if (msg) {\n                        if (verbose) console.log(`recv p${p.id} (conn:${conn}) msg:${msg.cmd} :: ${JSON.stringify(msg)}`)\n\n                        p.receive(msg)\n                    }\n                }            \n            }\n\n            if (verbose) console.log(...peers.map(p => ({T: p.T, f: p.fissures})))\n        
}\n\n        if (verbose) console.log(`----clean conns----`)\n        for (let [conn, c] of Object.entries(conns)) {\n            let peer_keys = Object.keys(c)\n            if (peer_keys.length < 2) {\n                let p = peers[peer_keys[0]]\n                let other = c[p.id].other\n\n                delete conns[conn]\n\n                if (verbose) console.log(`diss p${p.id} (conn:${conn})`)\n\n                if (p.conns[conn] != null || p.proto_conns[conn]) p.disconnect(conn)\n            }\n        }\n\n        if (verbose) console.log(`----conn all----`)\n        for (let i = 1; i < peers.length; i++) {\n            let p1 = peers[i]\n            let p2 = peers[Math.floor(Math.random() * i)]\n\n            let conn = next_conn_id++\n            conns[conn] = {\n                [p1.id]: {other: p2.id, q: []},\n                [p2.id]: {other: p1.id, q: []}\n            }\n\n            if (verbose) console.log(`conn p${p1.id} -> p${p2.id} (conn:${conn})`)\n\n            p1.get(conn)\n        }\n\n        function pump_all() {\n            if (verbose) console.log(`----pump all----`)\n            for (let i = 1000; i >= 0; i--) {\n                if (i == 0) {\n                    console.log(`safety limit exceeded!`)\n                    throw 'bad'\n                }\n                if (verbose) console.log(`i = ${i}`)\n\n                last_n++\n\n                let options = []\n\n                for (let [conn, c] of Object.entries(conns)) {\n                    for (let [pk, pp] of Object.entries(c)) {\n                        if (pp.q.length) {\n                            options.push(() => {\n                                let p = peers[pk]\n                                let msg = pp.q.shift()\n\n                                if (verbose) {\n                                    console.log(`recv p${p.id} (conn:${conn}, ${conns[conn][p.id].other}) msg:${msg.cmd}, ${JSON.stringify(msg)}`)\n                                }\n\n               
                 p.receive(msg)\n\n                                if (verbose) {\n                                    console.log(...peers.map(p => ({T: p.T, f: p.fissures})))\n                                }\n                            })\n                        }\n                    }\n                }\n\n                if (options.length) {\n                    options[Math.floor(Math.random() * options.length)]()\n                } else break\n            }\n        }\n\n        pump_all()\n\n        if (verbose) console.log(`----resend fissures----`)\n        for (let p of peers) {\n            if (verbose) console.log(`p${p.id} sending fissures`)\n\n            for (let c of Object.keys(p.conns)) p.send({cmd: 'welcome', versions: [], fissures: Object.values(p.fissures), conn: c})\n        }\n\n        pump_all()\n\n        if (verbose) console.log(`----joiner----`)\n\n        peers[0].set({range: '[0:0]', content: '_'})\n\n        pump_all()\n\n        let final_text = peers[0].S\n        if (typeof final_text != 'string') {\n            console.log('final not a string: ', final_text)\n            return false\n        }\n        for (let p of peers) {\n            if (p.S != final_text) {\n                console.log(`peer not in line (we want ${final_text}): `, p)\n                return false\n            }\n            if (Object.keys(p.T).length != 1) {\n                console.log('peer has big T: ', p)\n                return false\n            }\n            if (Object.keys(p.fissures).length != 0) {\n                console.log('peer has fissures: ', p)\n                return false\n            }\n        }\n\n        return true\n        } catch (e) {\n            console.log(`E: ${e}`, e.stack)\n            return false\n        }\n    }\n\n})()\n\n</script>\n"
  },
  {
    "path": "antimatter_ts/tsconfig.json",
    "content": "{\n    \"compilerOptions\": {\n        \"lib\": [\"ES2017\"],\n    }\n}"
  },
  {
    "path": "antimatter_wiki/client.html",
    "content": "<body>\n    <script src=\"https://unpkg.com/@braidjs/antimatter@0.0.20\"></script>\n    <script src=\"https://invisible-college.github.io/universal-sync/diffsync.js\"></script>\n    <!--<script src=\"https://braid.news/diffsync.js\"></script>-->\n    <script src=\"https://invisible.college/js/marked.min.js\"></script>\n    <link rel=\"stylesheet\" href=\"https://invisible.college/css/github-markdown.css\">\n    <style>note {position: absolute; left: 900; width: 400; background-color: #F8F3B7; padding: 10; box-shadow: -2px 2px 2px #ccc; border-radius: 2; text-align: left; font-size: 13; margin-top: -7}</style>\n</body>\n\n<!--<script src=\"https://braid.org/point.js\"></script>-->\n\n<script>\n\nconsole.log(\"__VERSION__\")\n\nvar wiki_host = \"__WIKI_HOST__\"\n\ndocument.title = window.location.pathname.substr(1)\ndocument.body.style.border = '3px solid transparent'\nvar real_errorrr_happened = false\nwindow.onerror = function (e) {\n    real_errorrr_happened = e\n    document.body.style.border = '4px red solid'\n    t.disabled = true\n    t.style.backgroundColor = '#fee'\n}\n\nvar output = document.createElement('div')\noutput.className = 'pad'\noutput.style.maxWidth = '900px'\ndocument.body.append(output)\n\nvar bottom_pad = document.createElement('div')\nbottom_pad.style.height = '50vh'\nbottom_pad.style.display = 'none'\ndocument.body.append(bottom_pad)\n\nvar t_d = make_html(`<div style=\"position:fixed;bottom:0px;right:0px;display:none;background:white\"></div>`)\ndocument.body.append(t_d)\n\nvar t = make_html('<textarea disabled=true style=\"position:absolute;left:0px;top:0px;width:100%;height:100%;background:rgba(0,0,0,0);font-size:15px;font-family:helvetica, monaco, lucida grande, avenir\"></textarea>')\nt_d.append(t)\n\nvar hud = make_html(`<pre style=\"position:absolute;right:20px;top:5px;background:rgba(255,255,255,.9);color:black;margin:0px;padding:4px;font-size: 10;\">hi!</pre>`)\nt_d.append(hud)\n\nvar edit = 
document.createElement('div')\nedit.style.position = 'fixed'\nedit.style.bottom = '0px'\nedit.style.right = '0px'\nedit.style.padding = '30px'\nedit.style.cursor = 'pointer'\nedit.style.textDecoration = 'underline'\nedit.style.backgroundColor = 'rgba(255, 255, 255, .5)'\nedit.onclick = toggle_editor\nedit.innerText = 'edit'\ndocument.body.append(edit)\n\nvar differ = document.createElement('div')\ndiffer.style.position = 'fixed'\ndiffer.style.bottom = '0px'\ndiffer.style.right = '30px'\ndiffer.style.fontSize = '10px'\ndiffer.style.cursor = 'pointer'\ndiffer.style.textDecoration = 'underline'\ndiffer.style.backgroundColor = 'rgba(100, 100, 100, .2)'\ndiffer.onclick = (e) => {showing_diff=!showing_diff; update_markdown()}\ndiffer.innerText = 'show diffs'\n//document.body.append(differ)\n\nlet a\nvar timer\nvar is_safari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent)\nvar render_delay = is_safari ? 600 : 30    // Safari markdown parsing is slow\nfunction update_markdown_later() {\n    if (timer) clearTimeout(timer)\n    timer = setTimeout(update_markdown, render_delay)\n}\nfunction update_markdown() {\n    timer = null\n    var source = showing_diff ? 
html_diffed(last_version||'', t.value) : t.value\n    output.innerHTML = marked.parse(source, {sanitize: !is_safe})\n\n    document.body.className = 'nopad'\n\n    update_hud()\n}\nupdate_markdown()\n\nfunction update_hud() {\n    hud.textContent = 'loading..'\n    if (a) {\n        hud.textContent = `versions: ${Object.keys(a.T).length}\n            fissures: ${Object.keys(a.fissures).length}`.replace(/^\\s*/mg, '')\n    }\n}\n\nvar is_safe = true\nvar channel = window.location.pathname.slice(1)\n\nconsole.log('channel: ' + channel)\n\nasync function create_antimatter_client(url, cb, on_receive) {\n    let ws\n    let set_ws = x => ws = x\n    let get_ws = () => ws\n    let send = x => {\n\n        console.log(`ready?: ${ws?.readyState == WebSocket.OPEN}, to send: ${JSON.stringify(x)}`)\n\n        if (ws?.readyState == WebSocket.OPEN && ws.my_id == x.conn) ws.send(JSON.stringify(x))\n    }\n\n    let a = antimatter.create(send, {fissure_lifetime: 1000 * 60 * 60})\n\n    let done\n    let p = new Promise(d => done = d)\n\n    let interval_ms = 1000 * 60\n\n    connect()\n    function connect() {\n        console.log(`try'na connect to: ${url}`)\n        let ws = set_ws(new WebSocket(url))\n        ws.my_id = Math.random().toString(36).slice(2)\n\n        let openned = false\n        let last_ping = Date.now()\n\n        ws.onopen = () => {\n            console.log(`OPEN`)\n            openned = true\n            t.disabled = false\n            localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`] = JSON.stringify([Date.now(), Date.now()])\n\n            a.get(ws.my_id)\n\n            last_ping = Date.now()\n            check_ping()\n            function check_ping() {\n                if (ws.readyState > 1) return\n                if (Date.now() - last_ping > 1000 * 30) {\n                    console.log('WS - NO PING, CLOSING!')\n                    return ws.close()\n                }\n                setTimeout(check_ping, 1000)\n            }\n        }\n      
  ws.onmessage = x => {\n            if (x.data == 'ping') {\n                last_ping = Date.now()\n                ws.send('pong')\n                return\n            }\n\n            console.log(`RECV: ${x.data}`)\n            let m = JSON.parse(x.data)\n\n            let r\n            try {\n                if (!done) on_receive?.(m)\n                if (m.type === 'error') throw new Error(m.message)\n                r = a.receive(m)\n            } catch (e) {\n                a.disconnect(ws.my_id)\n                ws.onclose = () => {}\n                ws.close()\n                alert('copy your work, and reload -- lost sync with server')\n                throw e\n            }\n\n            if (m.cmd == 'welcome') resolve_old_fissures()\n\n            update_hud()\n\n            if (done) done()\n            else if (r?.length) cb(r)\n        }\n\n        let interval = setInterval(() => {\n            if (ws.readyState > 1) return clearInterval(interval)\n            if (!openned) return\n            console.log('INTERVAL')\n            localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`] = JSON.stringify([JSON.parse(localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`] ?? 'null')?.[1] ?? 
Date.now(), Date.now()])\n        }, interval_ms)\n\n        ws.onclose = () => {\n            console.log(`CLOSE`)\n            if (openned) {\n                localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`] = `[0,0]`\n                a.disconnect(ws.my_id)\n            }\n            setTimeout(() => {\n                if (ws == get_ws()) connect()\n            }, 3000)\n        }\n    }\n\n    window.addEventListener('unload', e => {\n        localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`] = `[0,0]`\n        a.forget(ws.my_id)\n    })\n\n    await p\n    done = null\n    return a\n\n    function resolve_old_fissures() {\n        // are we too old?\n        if (JSON.parse(localStorage[`antimatter_conn_6c5kho7dle_${ws.my_id}`])[0] < Date.now() - interval_ms*2.1) return\n\n        // since we're not too old, we can judge other connections for being too old..\n        let old_conns = {}\n        for (let i = 0; i < localStorage.length; i++) {\n            let k = localStorage.key(i)\n            let m = k.match(/^antimatter_conn_6c5kho7dle_(.*)$/)\n            if (m && JSON.parse(localStorage[k])[1] < Date.now() - interval_ms*5) old_conns[m[1]] = true\n        }\n\n        let self = a\n\n        let new_fissures = []\n        let unfissured = {}\n\n        Object.entries(self.fissures).forEach(([fk, f]) => {\n            var other_key = f.b + ':' + f.a + ':' + f.conn\n            var other = self.fissures[other_key]\n\n            if (!other && old_conns[f.conn]) {\n                other = {...f, a: f.b, b: f.a}\n                new_fissures.push(self.fissures[other_key] = other)\n                self.acks_in_process = {}\n\n                console.log(`OLD CONN REMOVAL: ${f.conn}`)\n            }\n\n            if (other) {\n                if (Object.keys(f.versions).length) {\n                    for (let v of Object.keys(f.versions)) unfissured[v] = true\n                    self.fissures[fk] = {...f, versions: {}}\n                }\n             
   if (Object.keys(other.versions).length) {\n                    for (let v of Object.keys(other.versions)) unfissured[v] = true\n                    self.fissures[other_key] = {...other, versions: {}}\n                }\n            }\n        })\n\n        if (Object.keys(unfissured).length) {\n            let ack_versions = self.ancestors(self.acked_boundary)\n            let unfissured_descendants = self.descendants(unfissured, true)\n            for (let un of Object.keys(unfissured_descendants)) if (ack_versions[un]) delete ack_versions[un]\n            self.acked_boundary = self.get_leaves(ack_versions)\n\n            let u = self.ancestors(self.unack_boundary)\n            for (let x of Object.keys(self.ancestors(unfissured_descendants))) u[x] = true\n            self.unack_boundary = self.get_leaves(u)\n        }\n\n        if (new_fissures.length) send({cmd: 'fissure', fissures: new_fissures, conn: ws.my_id})\n\n        for (let conn of Object.keys(old_conns)) localStorage.removeItem(`antimatter_conn_6c5kho7dle_${conn}`)\n    }\n}\n\n;(async () => {\n    a = await create_antimatter_client(`${wiki_host}/${encodeURIComponent(channel)}`, patches => {\n\n        console.log('PATCHES ' + JSON.stringify(patches, null, '    '))\n\n        for (let patch of patches) {\n\n            console.log(`patch = ${JSON.stringify(patch, null, '    ')}`)\n\n            let [range_start, range_end] = patch.range.match(/\\d+/g)?.map(x => 1*x) ?? 
[0, 0]\n            let len = patch.content.length\n\n            let sel = [t.selectionStart, t.selectionEnd]\n\n            t.value = t.value.slice(0, range_start) + patch.content + t.value.slice(range_end)\n\n            if (range_end - range_start > 0) {\n                if (range_start < sel[0]) sel[0] -= Math.min(sel[0], range_end) - range_start\n                if (range_start < sel[1]) sel[1] -= Math.min(sel[1], range_end) - range_start\n            }\n            if (range_start <= sel[0]) {\n                sel[0] += len\n                sel[1] += len\n            } else if (range_start < sel[1]) sel[1] += len\n\n            t.selectionStart = sel[0]\n            t.selectionEnd = sel[1]\n        }\n\n        update_markdown_later()\n\n\n    }, msg => {\n        let cv = Object.keys(a.current_version)\n        if (msg.cmd == 'ack' && cv.length == 1 && msg.version == cv[0])\n            t.style['caret-color'] = 'black'\n    })\n\n    console.log(`loaded: ${a.read()}`)\n\n    t.value = a.read()\n    update_markdown_later()\n\n    a.my_on_change = () => {\n        let slices = get_diff_patch(a.read(), t.value)\n        a.set(...slices.map(s => ({range: `[${s[0]}:${s[0] + s[1]}]`, content: s[2]})))\n\n        t.style['caret-color'] = 'rgb(255,0,0)'\n\n        console.log(`diff = ${JSON.stringify(slices)}`)\n\n        update_markdown_later()\n    }\n\n    t.oninput = a.my_on_change\n})()\n\nvar vert = true, editing = false\nfunction render() {\n    t_d.style.display = editing ? null : 'none'\n    bottom_pad.style.display = (editing && vert) ? null : 'none'\n\n    if (vert) {\n        t_d.style.width = '100%'\n        t_d.style.height = '50vh'\n        output.style.width = null\n    } else {\n        t_d.style.width = '45vw'\n        t_d.style.height = '100%'\n        output.style.width = editing ? 
'55vw' : null\n    }\n}\nvar first_time = true\nfunction toggle_editor () {\n    editing = !editing\n    render()\n    if (editing) t.focus()\n    if (editing && first_time) {\n        first_time = false\n        t.setSelectionRange(0,0)\n        t.scrollTop = 0\n    }\n    update_markdown()\n}\n\ndocument.body.onkeydown = function (e) {\n    var mods = 0\n    for (k in {ctrlKey:1, shiftKey:1, altKey:1, metaKey:1})\n        if (e[k]) mods += 1\n\n    if (e.keyCode == 27\n        //|| (mods >= 2 && e.keyCode == 32)\n        ) {\n        e.stopPropagation()\n        toggle_editor()\n    }\n}\n\nwindow.onresize = function () {\n    var w = window.innerWidth, h = window.innerHeight\n    if (w < 1200 !== vert) {\n        vert = !vert\n        render()\n    }\n}\nonresize()\nrender()\n\nfunction handle_pasted_images (el, cb) {\n    el.addEventListener(\"paste\", function(e) {\n        // 1. Let's look for an image in the clipboard data\n        if (!e.clipboardData || !e.clipboardData.items) return\n        var items = e.clipboardData.items\n        var blob\n        for (var i=0; i<items.length; i++) {\n            if (items[i].type.indexOf(\"image\") === -1) continue\n            blob = items[i].getAsFile()\n        }\n        if (!blob) return\n\n        if (blob.size > 1000000) {\n            insert_at_cursor(t, '<mark>Pasted image is bigger than 1MB.</mark>')\n            return\n        }\n\n        // 2. Now we have the pasted image as a blob.  
Let's upload it!\n        var filename = Math.random().toString(36).slice(2) + '.png'\n        fetch('https://invisible.college/wikimg/' + filename, {\n            method: 'PUT',\n            body: blob\n        }).then( r => {\n            if (r.status === 200)\n                cb('https://invisible.college/wiki-images/' + filename)\n        })\n\n    }, false)\n}\n\nhandle_pasted_images(window, x => {\n    insert_at_cursor(t, '<img src=\"'+x+'\">')\n})\n\nfunction insert_at_cursor (textarea, string) {\n    // IE support\n    if (document.selection) {\n        textarea.focus()\n        sel = document.selection.createRange()\n        sel.text = string\n    }\n    // Mozilla and others\n    else if (textarea.selectionStart || textarea.selectionStart == '0') {\n        var startPos = textarea.selectionStart\n        var endPos = textarea.selectionEnd\n        textarea.value = textarea.value.substring(0, startPos)\n            + string\n            + textarea.value.substring(endPos, textarea.value.length)\n    } else\n        textarea.value += string;\n\n    // Now tell antimatter\n    a?.my_on_change()\n}\n\nvar last_version\nvar showing_diff = false\naddEventListener('keypress', function (e) {\n    if (e.ctrlKey && e.key === 's') {\n        last_version = t.value\n        event.preventDefault()\n        update_markdown()\n    }\n})\n\ncolor = (s, c) =>\n    s.split('\\n\\n').map(x => '<span style=\"background-color: '+ c +'\">'+ x +'</span>').join('\\n\\n')\n\ngreen = (s) => color (s, '#cfc')\nred   = (s) => color (s, '#fcc')\nfunction html_diffed (Old, New) {\n    var diff = diff_main(Old, New)\n    var html = diff.map(\n        (x) => x[0] == 1 ? green(x[1])\n            : x[0] == -1 ? 
red(x[1])\n            : x[1]\n    ).join('')\n    //console.log(JSON.stringify(marked(html, {sanitize:false})))\n    return html\n}\n\n\nvar ting = null\nfunction scroll () {\n    // We only scroll to the ting once -- if it's fresh\n    if (ting || location.hash.length === 0) return\n\n    ting = document.getElementById(location.hash.substr(1))\n    ting && ting.scrollIntoView()\n}\nfor (i=0; i<50; i++)\n    setTimeout(scroll, i / 5.0 * 1000)\n\nfunction make_html(html) {\n    let d = document.createElement('div')\n    d.innerHTML = html\n    return d.firstChild    \n}\n    \n</script>\n"
  },
  {
    "path": "antimatter_wiki/package.json",
    "content": "{\n  \"name\": \"@braidjs/antimatter_wiki\",\n  \"version\": \"0.1.5\",\n  \"description\": \"collaborative wiki using antimatter sync algorithm\",\n  \"main\": \"server.js\",\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org/antimatter\",\n  \"dependencies\": {\n    \"@braidjs/antimatter\": \"^0.0.12\",\n    \"ws\": \"^8.16.0\"\n  }\n}\n"
  },
  {
    "path": "antimatter_wiki/readme.md",
    "content": "# MOVED TO https://github.com/braid-org/antimatter_wiki\n\n# Antimatter Wiki\n\nA collaborative wiki based on the [Antimatter Algorithm](https://braid.org/antimatter).\n\nTo use:\n\n```bash\nnpm install @braidjs/antimatter_wiki\n```\n\nThen put this into an app.js:\n\n```javascript\nvar port = 60509, domain = 'localhost:60509', ws_scheme\n\nrequire('@braidjs/antimatter_wiki').serve({\n    port: 60509,\n    domain: 'localhost:60509',\n    ws_prefix: 'wss://'          // Or 'ws://' for insecure websocket\n})\n```\n\nAnd run it with `node app.js`.\n"
  },
  {
    "path": "antimatter_wiki/server.js",
    "content": "\nconsole.log(require('./package.json').version)\n\nvar fs = require('fs')\nvar fs_p = require('fs/promises')\n\nvar {antimatter} = require('@braidjs/antimatter')\n\nvar port = process.argv[2] || 1001\nvar ws_url = process.argv[3] || `ws://localhost:${port}`\nvar fissure_lifetime = 1*(process.argv[4] || 1000 * 60 * 60)\n\nconsole.log(`port = ${port}`)\nconsole.log(`ws_url = ${ws_url}`)\nconsole.log(`fissure_lifetime = ${fissure_lifetime / (1000 * 60 * 60)} hours`)\n\nif (!fs.existsSync('./antimatter_wiki_db')) fs.mkdirSync('./antimatter_wiki_db')\n\nlet conns = {}\n\nlet antimatters = {}\nasync function ensure_antimatter(key) {\n    console.log('finding db at ', JSON.stringify(key))\n    if (!antimatters[key]) antimatters[key] = new Promise(async done => {\n        let dir = `./antimatter_wiki_db/${encodeURIComponent(key)}`\n        if (!fs.existsSync(dir))\n            fs.mkdirSync(dir)\n\n        let files = []\n        for (let filename of await fs_p.readdir(dir)) {\n            let m = filename.match(/^([dw])(\\d+)$/)\n            if (m) files.push({t: m[1], i: 1*m[2]})\n        }\n        files.sort((a, b) => a.i - b.i)\n        let file_i = files[files.length - 1]?.i ?? -1\n    \n        console.log('files: ', files)\n    \n        await Promise.all(files.splice(0, files.reduce((a, b, i) => b.t == 'd' ? 
i : a, 0)).map(x => fs_p.rm(`${dir}/${x.t}${x.i}`)))\n\n        let a\n\n        function create_antimatter(prev) {\n            let a = antimatter.create(x => {\n                try {\n                    console.log(`key=${key}, sending to [${x.conn}]: ` + JSON.stringify(x).slice(0, 100))\n                    conns[x.conn].send(JSON.stringify(x))\n                } catch (e) {\n                    console.log(`key=${key}, failed to send: ` + e)\n                }\n            }, prev)\n            a.fissure_lifetime = fissure_lifetime\n            if (a.S == null) a.set({range: '', content: ''})\n            return a\n        }\n\n        for (let file of files) {\n            console.log(`file: `, file)\n    \n            let s = await fs_p.readFile(`${dir}/${file.t}${file.i}`)\n            if (file.t == 'd') {\n                a = create_antimatter(JSON.parse(s))\n            } else {\n                for (let line of ('' + s).split(/\\n/)) {\n                    let x = JSON.parse(line || '{}')\n                    if (x.receive) {\n                        try {\n                            a.receive(x.receive)\n                        } catch (e) {}\n                    }\n                    if (x.disconnect) a.disconnect(x.disconnect)\n                }\n            }\n        }\n\n        if (!a) a = create_antimatter()\n    \n        for (let c of Object.keys(a.conns)) a.disconnect(c)\n        for (let c of Object.keys(a.proto_conns)) a.disconnect(c)\n    \n        let dirty = true\n        let wol_filename\n        await compactor()\n        async function compactor() {\n            if (dirty) {\n                dirty = false\n                wol_filename = `${dir}/w${file_i + 2}`\n                await fs_p.writeFile(`${dir}/d${file_i + 1}`, JSON.stringify(a))\n    \n                await Promise.all(files.map(x => fs_p.rm(`${dir}/${x.t}${x.i}`)))\n                files = [{t: 'd', i: file_i + 1}, {t: 'w', i: file_i + 2}]\n                file_i += 2\n  
          }\n            setTimeout(compactor, 1000 * 60)\n        }\n\n        a.write_to_log = (obj) => {\n            fs.appendFileSync(wol_filename, JSON.stringify(obj) + '\\n')\n            dirty = true\n        }\n\n        done(a)\n    })\n    return await antimatters[key]\n}\n\nfunction respond_with_client (req, res) {\n    var client_html = fs.readFileSync('./client.html')\n    client_html = '' + client_html\n    client_html = client_html.replace(/__VERSION__/, `${require('./package.json').version}`)\n    client_html = client_html.replace(/__WIKI_HOST__/, `${ws_url}`)\n    var etag = require('crypto').createHash('md5').update(client_html).digest('hex')\n    if (req.headers['if-none-match'] === etag) {\n        res.writeHead(304)\n        res.end()\n    } else {\n        res.writeHead(200, {\n            'Content-Type': 'text/html',\n            'Cache-Control': 'public, max-age=31536000',\n            'ETag': etag,\n        })\n        res.end(client_html)\n    }\n}\n\nvar server = require('http').createServer(async function (req, res) {\n    console.log('GET: ', {method: req.method, url: req.url})\n    res.setHeader('Access-Control-Allow-Origin', '*')\n    res.setHeader('Access-Control-Allow-Headers', '*')\n    res.setHeader('Access-Control-Allow-Methods', '*')\n    respond_with_client(req, res)\n})\n\nvar wss = new (require('ws').Server)({server})\nwss.on('connection', (ws, req) => {\n    console.log(`new connection! ${req.url}`)\n\n    let key = decodeURIComponent(req.url.slice(1))\n    if (key === '' || key[0] === '_')\n        key = '_' + key\n    let a_p = ensure_antimatter(key)\n\n    let conn\n    let pong = true\n\n    ping()\n    function ping() {\n        if (ws.readyState > 1) return\n        if (!pong) {\n            console.log(`ping timeout! 
conn ${conn} key=${key}`)\n            ws.terminate()\n            return\n        }\n        pong = false\n        ws.send('ping')\n        setTimeout(ping, 12000)\n    }\n\n    ws.on('message', async x => {\n        pong = true\n        if (x == 'pong') return\n        if (x == 'ping') return ws.send('pong')\n\n        console.log(`RECV: ${x.slice(0, 100)}`)\n        x = JSON.parse(x)\n\n        if (x.conn) conns[conn = x.conn] = ws\n\n        var a = await a_p\n        a.write_to_log({receive: x})\n        try {\n            a.receive(x)\n        } catch (e) {\n            ws.send(JSON.stringify({type: 'error', message: e.message}))\n        }\n    })\n    ws.on('close', async () => {\n        if (!conn) return\n\n        console.log(`close: ` + conn)\n        var a = await a_p\n        a.write_to_log({disconnect: conn})\n        a.disconnect(conn)\n        delete conns[conn]\n    })\n})\n\nserver.listen(port)\nconsole.log(`listening on port ${port}`)\n"
  },
  {
    "path": "braid-http/braid-http-client.js",
    "content": "// var peer = Math.random().toString(36).substr(2)\n\n// ***************************\n// http\n// ***************************\n\nfunction braidify_http (http) {\n    http.normal_get = http.get\n    http.get = function braid_req (arg1, arg2, arg3) {\n        var url, options, cb\n\n        // http.get() supports two forms:\n        //\n        //  - http.get(url[, options][, callback])\n        //  - http.get(options[, callback])\n        //\n        // We need to know which arguments are which, so let's detect which\n        // form we are looking at.\n\n        // Detect form #1: http.get(url[, options][, callback])\n        if (typeof arg1 === 'string' || arg1 instanceof URL) {\n            url = arg1\n            if (typeof arg2 === 'function')\n                cb = arg2\n            else {\n                options = arg2\n                cb = arg3\n            }\n        }\n\n        // Otherwise it's form #2: http.get(options[, callback])\n        else {\n            options = arg2\n            cb = arg3\n        }\n\n        options = options || {}\n\n        // Now we know where the `options` are specified, let's set headers.\n        if (!options.headers)\n            options.headers = {}\n\n        // Add the subscribe header if this is a subscription\n        if (options.subscribe)\n            options.headers.subscribe = 'true'\n\n        // // Always add the `peer` header\n        // options.headers.peer = options.headers.peer || peer\n\n        // Wrap the callback to provide our new .on('update', ...) 
feature\n        // on nodejs servers\n        var on_update,\n            on_error,\n            orig_cb = cb\n        cb = (res) => {\n            res.orig_on = res.on\n            res.on = (key, f) => {\n\n                // Define .on('update', cb)\n                if (key === 'update'\n                    || key === 'version' /* Deprecated API calls it 'version' */ ) {\n\n                    // If we have an 'update' handler, let's remember it\n                    on_update = f\n\n                    // And set up a subscription parser\n                    var parser = subscription_parser((update, error) => {\n                        if (!error)\n                            on_update && on_update(update)\n                        else\n                            on_error && on_error(error)\n                    })\n\n                    // That will run each time we get new data\n                    res.orig_on('data', (chunk) => {\n                        parser.read(chunk)\n                    })\n                }\n\n                // Forward .on('error', cb) and remember the error function\n                else if (key === 'error') {\n                    on_error = f\n                    res.orig_on(key, f)\n                }\n\n                // Forward all other .on(*, cb) calls\n                else res.orig_on(key, f)\n            }\n            orig_cb && orig_cb(res)\n        }\n            \n        // Now put the parameters back in their prior order and call the\n        // underlying .get() function\n        if (url) {\n            arg1 = url\n            if (options) {\n                arg2 = options\n                arg3 = cb\n            } else {\n                arg2 = cb\n            }\n        } else {\n            arg1 = options\n            arg2 = cb\n        }\n\n        return http.normal_get(arg1, arg2, arg3)\n    }\n    return http\n}\n\n\n\n// ***************************\n// Fetch\n// ***************************\n\nvar 
normal_fetch,\n    AbortController,\n    Headers,\n    is_nodejs = typeof window === 'undefined'\n\nif (is_nodejs) {\n    // Nodejs\n\n    // Note that reconnect logic doesn't work in node-fetch, because it\n    // doesn't call the .catch() handler when the stream fails.\n    //\n    // See https://github.com/node-fetch/node-fetch/issues/753\n\n    normal_fetch = require('node-fetch')\n    AbortController = require('abort-controller')\n    Headers = normal_fetch.Headers\n    var to_whatwg_stream = require('web-streams-node').toWebReadableStream\n} else {\n    // Web Browser\n    normal_fetch = window.fetch\n    AbortController = window.AbortController\n    Headers = window.Headers\n    // window.fetch = braid_fetch\n}\n\nasync function braid_fetch (url, params = {}) {\n    params = {...params}  // Copy params, because we'll mutate it\n\n    // Initialize the headers object\n    if (!params.headers)\n        params.headers = new Headers()\n    else\n        params.headers = new Headers(params.headers)\n\n    // Sanity check inputs\n    if (params.version)\n        console.assert(Array.isArray(params.version),\n                       'fetch(): `version` must be an array')\n    if (params.parents)\n        console.assert(Array.isArray(params.parents),\n                       'fetch(): `parents` must be an array')\n\n    // // Always set the peer\n    // params.headers.set('peer', peer)\n\n    // We provide some shortcuts for Braid params\n    if (params.version)\n        params.headers.set('version', params.version.map(JSON.stringify).join(', '))\n    if (params.parents)\n        params.headers.set('parents', params.parents.map(JSON.stringify).join(', '))\n    if (params.subscribe)\n        params.headers.set('subscribe', 'true')\n    if (params.peer)\n        params.headers.set('peer', params.peer)\n\n    // Prevent browsers from going to disk cache\n    params.cache = 'no-cache'\n\n    // Prepare patches\n    if (params.patches) {\n        
console.assert(!params.body, 'Cannot send both patches and body')\n        console.assert(typeof params.patches === 'object', 'Patches must be object or array')\n\n        // We accept a single patch as an array of one patch\n        if (!Array.isArray(params.patches))\n            params.patches = [params.patches]\n\n        // If just one patch, send it directly!\n        if (params.patches.length === 1) {\n            let patch = params.patches[0]\n            params.headers.set('Content-Range', `${patch.unit} ${patch.range}`)\n            params.headers.set('Content-Length', `${(new TextEncoder().encode(patch.content)).length}`)\n            params.body = patch.content\n        }\n\n        // Multiple patches get sent within a Patches: N block\n        else {\n            params.headers.set('Patches', params.patches.length)\n            params.body = (params.patches).map(patch => {\n                var length = `content-length: ${(new TextEncoder().encode(patch.content)).length}`\n                var range = `content-range: ${patch.unit} ${patch.range}`\n                return `${length}\\r\\n${range}\\r\\n\\r\\n${patch.content}\\r\\n`\n            }).join('\\r\\n')\n        }\n    }\n\n    // Wrap the AbortController with a new one that we control.\n    //\n    // This is because we want to be able to abort the fetch that the user\n    // passes in.  
However, the fetch() command uses a silly \"AbortController\"\n    // abstraction to abort fetches, which has both a `signal` and a\n    // `controller`, and only passes the signal to fetch(), but we need the\n    // `controller` to abort the fetch itself.\n\n    var original_signal = params.signal\n    var underlying_aborter = new AbortController()\n    params.signal = underlying_aborter.signal\n    if (original_signal)\n        original_signal.addEventListener(\n            'abort',\n            () => underlying_aborter.abort()\n        )\n\n    // Now we run the original fetch....\n    var res = await normal_fetch(url, params)\n\n    // And customize the response with a couple methods for getting\n    // the braid subscription data:\n    res.subscribe    = start_subscription\n    res.subscription = {[Symbol.asyncIterator]: iterator}\n\n\n    // Now we define the subscription function we just used:\n    function start_subscription (cb, error) {\n        if (!res.ok)\n            throw new Error('Request returned not ok status:', res.status)\n\n        if (res.bodyUsed)\n            // TODO: check if this needs a return\n            throw new Error('This response\\'s body has already been read', res)\n\n        // Parse the streamed response\n        handle_fetch_stream(\n            res.body,\n\n            // Each time something happens, we'll either get a new\n            // version back, or an error.\n            (result, err) => {\n                if (!err)\n                    // Yay!  We got a new version!  
Tell the callback!\n                    cb(result)\n                else {\n                    // This error handling code runs if the connection\n                    // closes, or if there is unparseable stuff in the\n                    // streamed response.\n\n                    // In any case, we want to be sure to abort the\n                    // underlying fetch.\n                    underlying_aborter.abort()\n\n                    // Then send the error upstream.\n                    if (error)\n                        error(err)\n                    else\n                        throw 'Unhandled network error in subscription'\n                }\n            }\n        )\n    }\n\n\n    // And the iterator for use with \"for async (...)\"\n    function iterator () {\n        // We'll keep this state while our iterator runs\n        var initialized = false,\n            inbox = [],\n            resolve = null,\n            reject = null\n\n        return {\n            async next() {\n                // If we've already received a version, return it\n                if (inbox.length > 0)\n                    return {done: false, value: inbox.shift()}\n\n                // Otherwise, let's set up a promise to resolve when we get the next item\n                var promise = new Promise((_resolve, _reject) => {\n                    resolve = _resolve\n                    reject  = _reject\n                })\n\n                // Start the subscription, if we haven't already\n                if (!initialized) {\n                    initialized = true\n\n                    // The subscription will call whichever resolve and\n                    // reject functions the current promise is waiting for\n                    start_subscription(x => resolve(x),\n                                       x => reject(x) )\n                }\n\n                // Now wait for the subscription to resolve or reject the promise.\n                var result = await 
promise\n\n                // Anything we get from here out we should add to the inbox\n                resolve = (new_version) => inbox.push(new_version)\n                reject  = (err) => {throw err}\n\n                return { done: false, value: result }\n            }\n        }\n    }\n\n    return res\n}\n\n// Parse a stream of versions from the incoming bytes\nasync function handle_fetch_stream (stream, cb) {\n    if (is_nodejs)\n        stream = to_whatwg_stream(stream)\n\n    // Set up a reader\n    var reader = stream.getReader(),\n        parser = subscription_parser(cb)\n    \n    while (true) {\n        var versions = []\n\n        // Read the next chunk of stream!\n        try {\n            var {done, value} = await reader.read()\n        }\n        catch (e) {\n            cb(null, e)\n            return\n        }\n\n        // Check if this connection has been closed!\n        if (done) {\n            console.debug(\"Connection closed.\")\n            cb(null, 'Connection closed')\n            return\n        }\n\n        // Tell the parser to process some more stream\n        parser.read(value)\n    }\n}\n\n\n\n// ****************************\n// Braid-HTTP Subscription Parser\n// ****************************\n\nvar subscription_parser = (cb) => ({\n    // A parser keeps some parse state\n    state: {input: []},\n\n    // And reports back new versions as soon as they are ready\n    cb: cb,\n\n    // You give it new input information as soon as you get it, and it will\n    // report back with new versions as soon as it finds them.\n    read (input) {\n\n        // Store the new input!\n        for (let x of input) this.state.input.push(x)\n\n        // Now loop through the input and parse until we hit a dead end\n        while (this.state.input.length) {\n\n            // Try to parse an update\n            try {\n                this.state = parse_update (this.state)\n            } catch (e) {\n                this.cb(null, e)\n                
return\n            }\n\n            // Maybe we parsed an update!  That's cool!\n            if (this.state.result === 'success') {\n                var update = {\n                    version: this.state.version,\n                    parents: this.state.parents,\n                    body:    this.state.body,\n                    patches: this.state.patches,\n\n                    // Output extra_headers if there are some\n                    extra_headers: extra_headers(this.state.headers)\n                }\n                for (var k in update)\n                    if (update[k] === undefined) delete update[k]\n                this.cb(update)\n\n                // Reset the parser for the next version!\n                this.state = {input: this.state.input}\n            }\n\n            // Or maybe there's an error to report upstream\n            else if (this.state.result === 'error') {\n                this.cb(null, this.state.message)\n                return\n            }\n\n            // We stop once we've run out of parseable input.\n            if (this.state.result == 'waiting') break\n        }\n    }\n})\n\n\n// ****************************\n// General parsing functions\n// ****************************\n//\n// Each of these functions takes parsing state as input, mutates the state,\n// and returns the new state.\n//\n// Depending on the parse result, each parse function returns:\n//\n//  parse_<thing> (state)\n//  => {result: 'waiting', ...}  If it parsed part of an item, but neeeds more input\n//  => {result: 'success', ...}  If it parses an entire item\n//  => {result: 'error', ...}    If there is a syntax error in the input\n\n\nfunction parse_update (state) {\n    // If we don't have headers yet, let's try to parse some\n    if (!state.headers) {\n        var parsed = parse_headers(state.input)\n\n        // If header-parsing fails, send the error upstream\n        if (parsed.result === 'error')\n            return parsed\n        if 
(parsed.result === 'waiting') {\n            state.result = 'waiting'\n            return state\n        }\n\n        state.headers = parsed.headers\n        state.version = state.headers.version\n        state.parents = state.headers.parents\n\n        // Take the parsed headers out of the buffer\n        state.input = parsed.input\n    }\n\n    // We have headers now!  Try parsing more body.\n    return parse_body(state)\n}\n\n// Parsing helpers\nfunction parse_headers (input) {\n\n    var h = extractHeader(input)\n    if (!h) return {result: 'waiting'}\n\n    var headers_source = h.header_string\n    var headers_length = headers_source.length\n    \n    // Let's parse them!  First define some variables:\n    var headers = {},\n        header_regex = /(:?[\\w-_]+):\\s?(.*)\\r?\\n?/gy,  // Parses one line a time\n        match,\n        found_last_match = false\n\n    // And now loop through the block, matching one line at a time\n    while (match = header_regex.exec(headers_source)) {\n        // console.log('Header match:', match && [match[1], match[2]])\n        headers[match[1].toLowerCase()] = match[2]\n\n        // This might be the last line of the headers block!\n        if (header_regex.lastIndex === headers_length)\n            found_last_match = true\n    }\n\n    // If the regex failed before we got to the end of the block, throw error:\n    if (!found_last_match)\n        return {\n            result: 'error',\n            message: 'Parse error in headers: \"'\n                + JSON.stringify(headers_source.substr(header_regex.lastIndex)) + '\"',\n            headers_so_far: headers,\n            last_index: header_regex.lastIndex, headers_length\n        }\n\n    // Success!  
Let's parse special headers\n    if ('version' in headers)\n        headers.version = JSON.parse('['+headers.version+']')\n    if ('parents' in headers)\n        headers.parents = JSON.parse('['+headers.parents+']')\n    if ('patches' in headers)\n        headers.patches = JSON.parse(headers.patches)\n\n    // Update the input\n    input = h.remaining_bytes\n\n    // And return the parsed result\n    return { result: 'success', headers, input }\n}\n\n// Content-range is of the form '<unit> <range>' e.g. 'json .index'\nfunction parse_content_range (range_string) {\n    var match = range_string.match(/(\\S+)( (.*))?/)\n    return match && {unit: match[1], range: match[3] || ''}\n}\nfunction parse_body (state) {\n\n    // Parse Body Snapshot\n\n    var content_length = parseInt(state.headers['content-length'])\n    if (!isNaN(content_length)) {\n\n        // We've read a Content-Length, so we have a block to parse\n        if (content_length > state.input.length) {\n            // But we haven't received the whole block yet\n            state.result = 'waiting'\n            return state\n        }\n\n        // We have the whole block!\n        state.result = 'success'\n\n        // If we have a content-range, then this is a patch\n        if (state.headers['content-range']) {\n            var match = parse_content_range(state.headers['content-range'])\n            if (!match)\n                return {\n                    result: 'error',\n                    message: 'cannot parse content-range',\n                    range: state.headers['content-range']\n                }\n            state.patches = [{\n                unit: match.unit,\n                range: match.range,\n                content: (new TextDecoder('utf-8')).decode(new Uint8Array(state.input.slice(0, content_length))),\n\n                // Question: Perhaps we should include headers here, like we do for\n                // the Patches: N headers below?\n\n                // headers: 
state.headers\n            }]\n        }\n\n        // Otherwise, this is a snapshot body\n        else\n            state.body = (new TextDecoder('utf-8')).decode(new Uint8Array(state.input.slice(0, content_length)))\n\n        state.input = state.input.slice(content_length)\n        return state\n    }\n\n    // Parse Patches\n\n    else if (state.headers.patches != null) {\n        state.patches = state.patches || []\n\n        var last_patch = state.patches[state.patches.length-1]\n\n        // Parse patches until the final patch has its content filled\n        while (!(state.patches.length === state.headers.patches\n                 && (state.patches.length === 0 || 'content' in last_patch))) {\n\n            // Are we starting a new patch?\n            if (!last_patch || 'content' in last_patch) {\n                last_patch = {}\n                state.patches.push(last_patch)\n            }\n\n            // Parse patch headers\n            if (!('headers' in last_patch)) {\n                var parsed = parse_headers(state.input)\n\n                // If header-parsing fails, send the error upstream\n                if (parsed.result === 'error')\n                    return parsed\n                if (parsed.result === 'waiting') {\n                    state.result = 'waiting'\n                    return state\n                }\n\n                // We parsed patch headers!  
Update state.\n                last_patch.headers = parsed.headers\n                state.input = parsed.input\n            }\n\n            // Todo: support custom patches, not just range-patch\n\n            // Parse Range Patch format\n            {\n                if (!('content-length' in last_patch.headers))\n                    return {\n                        result: 'error',\n                        message: 'no content-length in patch',\n                        patch: last_patch, input: (new TextDecoder('utf-8')).decode(new Uint8Array(state.input))\n                    }\n\n                if (!('content-range' in last_patch.headers))\n                    return {\n                        result: 'error',\n                        message: 'no content-range in patch',\n                        patch: last_patch, input: (new TextDecoder('utf-8')).decode(new Uint8Array(state.input))\n                    }\n\n                var content_length = parseInt(last_patch.headers['content-length'])\n\n                // Does input have the entire patch contents yet?\n                if (state.input.length < content_length) {\n                    state.result = 'waiting'\n                    return state\n                }\n\n                var match = parse_content_range(last_patch.headers['content-range'])\n                if (!match)\n                    return {\n                        result: 'error',\n                        message: 'cannot parse content-range in patch',\n                        patch: last_patch, input: (new TextDecoder('utf-8')).decode(new Uint8Array(state.input))\n                    }\n\n                last_patch.unit = match.unit\n                last_patch.range = match.range\n                last_patch.content = (new TextDecoder('utf-8')).decode(new Uint8Array(state.input.slice(0, content_length)))\n                last_patch.extra_headers = extra_headers(last_patch.headers)\n                delete last_patch.headers  // We only 
keep the extra headers ^^\n\n                // Consume the parsed input\n                state.input = state.input.slice(content_length)\n            }\n        }\n\n        state.result = 'success'\n        return state\n    }\n\n    return {\n        result: 'error',\n        message: 'cannot parse body without content-length or patches header'\n    }\n}\n\n// The \"extra_headers\" field is returned to the client on any *update* or\n// *patch* to include any headers that we've received, but don't have braid\n// semantics for.\n//\n// This function creates that hash from a headers object, by filtering out all\n// known headers.\nfunction extra_headers (headers) {\n    // Clone headers\n    var result = Object.assign({}, headers)\n\n    // Remove the non-extra parts\n    var known_headers = ['version', 'parents', 'patches',\n                         'content-length', 'content-range']\n    for (var i = 0; i < known_headers.length; i++)\n        delete result[known_headers[i]]\n\n    // Return undefined if we deleted them all\n    if (Object.keys(result).length === 0)\n        return undefined\n\n    return result\n}\n\n// a parsing utility function that will inspect a byte array of incoming data\n// to see if there is header information at the beginning,\n// namely some non-newline characters followed by two newlines\nfunction extractHeader(input) {\n    // Find the start of the headers\n    let begin_headers_i = 0;\n    while (input[begin_headers_i] === 13 || input[begin_headers_i] === 10) {\n        begin_headers_i++;\n    }\n    if (begin_headers_i === input.length) {\n        return null; // Incomplete headers\n    }\n\n    // Look for the double-newline at the end of the headers\n    let end_headers_i = begin_headers_i;\n    let size_of_tail = 0;\n    while (end_headers_i < input.length) {\n        if (input[end_headers_i] === 10 && input[end_headers_i + 1] === 10) {\n            size_of_tail = 2;\n            break;\n        }\n        if 
(input[end_headers_i] === 10 && input[end_headers_i + 1] === 13 && input[end_headers_i + 2] === 10) {\n            size_of_tail = 3;\n            break;\n        }\n        if (input[end_headers_i] === 13 && input[end_headers_i + 1] === 10 && input[end_headers_i + 2] === 10) {\n            size_of_tail = 3;\n            break;\n        }\n        if (input[end_headers_i] === 13 && input[end_headers_i + 1] === 10 && input[end_headers_i + 2] === 13 && input[end_headers_i + 3] === 10) {\n            size_of_tail = 4;\n            break;\n        }\n\n        end_headers_i++;\n    }\n\n    // If no double-newline is found, wait for more input\n    if (end_headers_i === input.length) {\n        return null; // Incomplete headers\n    }\n\n    // Extract the header string\n    const headerBytes = input.slice(begin_headers_i, end_headers_i);\n    const headerString = new TextDecoder('utf-8').decode(new Uint8Array(headerBytes));\n\n    // Return the remaining bytes and the header string\n    const remainingBytes = input.slice(end_headers_i + size_of_tail);\n    return {\n        remaining_bytes: remainingBytes,\n        header_string: headerString\n    };\n}\n\n// ****************************\n// Exports\n// ****************************\n\nif (typeof module !== 'undefined' && module.exports)\n    module.exports = {\n        fetch: braid_fetch,\n        http: braidify_http,\n        subscription_parser,\n        parse_update,\n        parse_headers,\n        parse_body\n    }\n"
  },
  {
    "path": "braid-http/braid-http-server.js",
    "content": "var assert = require('assert')\n\n// Return a string of patches in pseudoheader format.\n//\n//   The `patches` argument can be:\n//     - Array of patches\n//     - A single patch\n//\n//   Multiple patches are generated like:\n//\n//       Patches: n\n//\n//       content-length: 21\n//       content-range: json .range\n//\n//       {\"some\": \"json object\"}\n//\n//       content-length: x\n//       ...\n//\n//   A single patch is generated like:\n//\n//       content-length: 21\n//       content-range: json .range\n//\n//       {\"some\": \"json object\"}\n//\nfunction generate_patches(res, patches) {\n    var result = ''\n\n    // `patches` must be a patch object or an array of patch objects\n    //  - Object:  {unit, range, content}\n    //  - Array:  [{unit, range, content}, ...]\n\n    assert(typeof patches === 'object')  // An array is also an object\n\n    // An array of one patch behaves like a single patch\n    if (Array.isArray(patches)) {\n\n        // Add `Patches: N` header if array\n        result += `Patches: ${patches.length}\\r\\n\\r\\n`\n    } else\n        // Else, we'll out put a single patch\n        patches = [patches]\n\n    // Generate each patch\n    patches.forEach((patch, i) => {\n        assert(typeof patch.unit    === 'string')\n        assert(typeof patch.range   === 'string')\n        assert(typeof patch.content === 'string')\n\n        if (i > 0)\n            result += '\\r\\n\\r\\n'\n\n        let extra_headers = Object.fromEntries(Object.entries(patch).filter(([k, v]) => k != 'unit' && k != 'range' && k != 'content'))\n\n        result += `Content-Length: ${(new TextEncoder().encode(patch.content)).length}\\r\nContent-Range: ${patch.unit} ${patch.range}\\r\n${Object.entries(extra_headers).map(([k, v]) => `${k}: ${v}\\r\\n`).join('')}\\r\n${patch.content}`\n    })\n    return result\n}\n\n\n// Deprecated method for legacy support\nfunction parse_patches (req, cb) {\n    parse_update(req, update => {\n        if 
(typeof update.body === 'string')\n            // Return body as an \"everything\" patch\n            cb([{unit: 'everything', range: '', content: update.body}])\n        else\n            cb(update.patches)\n    })\n}\n\n// This function reads an update (either a set of patches, or a body) from a\n// ReadableStream and then fires a callback when finished.\nfunction parse_update (req, cb) {\n    var num_patches = req.headers.patches\n\n    if (!num_patches && !req.headers['content-range']) {\n        var body = ''\n        req.on('data', chunk => {body += chunk.toString()})\n        req.on('end', () => {\n            cb({ body, patches: undefined })\n        })\n    }\n\n    // Parse a single patch, lacking Patches: N\n    else if (num_patches === undefined && req.headers['content-range']) {\n\n        // We only support range patches right now, so there must be a\n        // Content-Range header.\n        assert(req.headers['content-range'], 'No patches to parse: need `Patches: N` or `Content-Range:` header in ' + JSON.stringify(req.headers))\n\n        // Parse the Content-Range header\n        // Content-range is of the form '<unit> <range>' e.g. 'json .index'\n        var [unit, range] = parse_content_range(req.headers['content-range'])\n\n        // The contents of the patch is in the request body\n        var buffer = []\n        // Read the body one chunk at a time\n        req.on('data', chunk => buffer.push(chunk))\n        // Then return it\n        req.on('end', () => {\n            patches = [{unit, range, content: Buffer.concat(buffer).toString('utf8')}]\n            cb({ patches, body: undefined })\n        })\n    }\n\n    // Parse multiple patches within a Patches: N block\n    else {\n        num_patches = parseInt(num_patches)\n        let patches = []\n        let buffer = []\n\n        // We check to send send patches each time we parse one.  
But if there\n        // are zero to parse, we will never check to send them.\n        if (num_patches === 0)\n            return cb({ patches: [], body: undefined })\n\n        req.on('data', function parse (chunk) {\n\n            // Merge the latest chunk into our buffer\n            for (let x of chunk) buffer.push(x)\n\n            while (patches.length < num_patches) {\n                let h = extractHeader(buffer)\n                if (!h) return\n\n                // Now let's parse those headers.\n                var headers = require('parse-headers')(h.header_string)\n\n                // We require `content-length` to declare the length of the patch.\n                if (!('content-length' in headers)) {\n                    // Print a nice error if it's missing\n                    console.error('No content-length in', JSON.stringify(headers),\n                                  'from', {buffer})\n                    process.exit(1)\n                }\n\n                var body_length = parseInt(headers['content-length'])\n\n                // Give up if we don't have the full patch yet.\n                if (h.remaining_bytes.length < body_length)\n                    return\n\n                // XX Todo: support custom patch types beyond content-range.\n\n                // Content-range is of the form '<unit> <range>' e.g. 'json .index'\n                var [unit, range] = parse_content_range(headers['content-range'])\n                var patch_content = new TextDecoder('utf-8').decode(new Uint8Array(h.remaining_bytes.slice(0, body_length)))\n\n                // We've got our patch!\n                patches.push({unit, range, content: patch_content})\n\n                buffer = h.remaining_bytes.slice(body_length)\n            }\n\n            // We got all the patches!  
Pause the stream and tell the callback!\n            req.pause()\n            cb({ patches, body: undefined })\n        })\n        req.on('end', () => {\n            // If the stream ends before we get everything, then return what we\n            // did receive\n            console.error('Request stream ended!')\n            if (patches.length !== num_patches)\n                console.error(`Got an incomplete PUT: ${patches.length}/${num_patches} patches were received`)\n        })\n    }\n}\n\nfunction parse_content_range (range_string) {\n    var match = range_string.match(/(\\S+)( (.*))?/)\n    if (!match) throw 'Cannot parse Content-Range in ' + string\n    var [unit, range] = [match[1], match[3] || '']\n    return [unit, range]\n}\n\nfunction braidify (req, res, next) {\n    // console.log('\\n## Braidifying', req.method, req.url, req.headers.peer)\n\n    // First, declare that we support Patches and JSON ranges.\n    res.setHeader('Range-Request-Allow-Methods', 'PATCH, PUT')\n    res.setHeader('Range-Request-Allow-Units', 'json')\n\n    // Extract braid info from headers\n    var version = ('version' in req.headers) && JSON.parse('['+req.headers.version+']'),\n        parents = ('parents' in req.headers) && JSON.parse('['+req.headers.parents+']'),\n        peer = req.headers['peer'],\n        url = req.url.substr(1)\n\n    // Parse the subscribe header\n    var subscribe = req.headers.subscribe\n    if (subscribe === 'true')\n        subscribe = true\n\n    // Define convenience variables\n    req.version   = version\n    req.parents   = parents\n    req.subscribe = subscribe\n\n    // Add the braidly request/response helper methods\n    res.sendUpdate = (stuff) => send_update(res, stuff, req.url, peer)\n    res.sendVersion = res.sendUpdate\n    req.parseUpdate = () => new Promise(\n        (done, err) => parse_update(req, (update) => done(update))\n    )\n    req.patches = () => new Promise(\n        (done, err) => parse_patches(req, (patches) => 
done(patches))\n    )\n    req.patchesJSON = () => new Promise(\n        (done, err) => parse_patches(\n            req,\n            (patches) => done(patches.map(\n                p => ({...p, content: JSON.parse(p.content)})\n            ))\n        )\n    )\n    req.startSubscription = res.startSubscription =\n        function startSubscription (args = {}) {\n            // console.log('Starting subscription!')\n            // console.log('Timeouts are:',\n            //             req.socket.server.timeout,\n            //             req.socket.server.keepAliveTimeout)\n\n            res.isSubscription = true\n\n            // Let's disable the timeouts (if it exists)\n            if (req.socket.server)\n                req.socket.server.timeout = 0.0\n\n            // We have a subscription!\n            res.statusCode = 209\n            res.setHeader(\"subscribe\", req.headers.subscribe)\n            res.setHeader('cache-control', 'no-cache, no-transform')\n\n\n            // Note: I used to explicitly disable transfer-encoding chunked\n            // here by setting the header to empty string.  This is the only\n            // way I know to disable it in nodejs.  We don't need chunked\n            // encoding in subscriptions, because chunked encoding is used to\n            // signal the end of a response, and subscriptions don't end.  I\n            // disabled them to make responses cleaner.  However, it turns out\n            // the Caddy proxy throws an error if it receives a response with\n            // transfer-encoding: set to the empty string.  
So I'm disabling\n            // it now.\n\n            // if (req.httpVersionMajor == 1) {\n            //     // Explicitly disable transfer-encoding chunked for http 1\n            //     res.setHeader('transfer-encoding', '')\n            // }\n\n            // Tell nginx not to buffer the subscription\n            res.setHeader('X-Accel-Buffering', 'no')\n\n            var connected = true\n            function disconnected (x) {\n                if (!connected) return\n                connected = false\n                // console.log(`Connection closed on ${req.url} from`, x, 'event')\n\n                // Now call the callback\n                if (args.onClose)\n                    args.onClose()\n            }\n\n            res.on('close',   x => disconnected('close'))\n            res.on('finish',  x => disconnected('finish'))\n            req.on('abort',   x => disconnected('abort'))\n        }\n\n    // Check the Useragent to work around Firefox bugs\n    if (req.headers['user-agent']\n        && typeof req.headers['user-agent'] === 'string'\n        && req.headers['user-agent'].toLowerCase().indexOf('firefox') > -1)\n        res.is_firefox = true\n\n    next && next()\n}\n\nfunction send_update(res, data, url, peer) {\n    var {version, parents, patches, patch, body} = data\n\n    function set_header (key, val) {\n        if (res.isSubscription)\n            res.write(`${key}: ${val}\\r\\n`)\n        else\n            res.setHeader(key, val)\n    }\n    function write_body (body) {\n        if (res.isSubscription)\n            res.write('\\r\\n' + body)\n        else\n            res.write(body)\n    }\n\n    // console.log('sending version', {url, peer, version, parents, patches, body,\n    //                                 subscription: res.isSubscription})\n\n    // Validate that the body and patches are strings\n    if (body !== undefined)\n        assert(typeof body === 'string')\n    else {\n        // Only one of patch or patches can be set\n   
     assert(!(patch && patches))\n        assert((patch || patches) !== undefined)\n        assert((patch || patches) !== null)\n\n        // Patches must be an array\n        if (patches)\n            assert(Array.isArray(patches))\n\n        // But if using `patch`, then we set `patches` to just that object\n        if (patch)\n            patches = patch\n\n        // Now `patches` will be an array of patches or a single patch object.\n        //\n        // This distinction is used in generate_patches() to determine whether\n        // to inline a single patch in the update body vs. writing out a\n        // Patches: N block.\n        assert(typeof patches === 'object')\n        if (Array.isArray(patches))\n            patches.forEach(p => {\n                assert('unit' in p)\n                assert('range' in p)\n                assert('content' in p)\n                assert(typeof p.content === 'string')\n            })\n    }\n\n    var body_exists = body || body === ''\n    assert(body_exists || patches, 'Missing body or patches')\n    assert(!(body_exists && patches), 'Cannot send both body and patches')\n\n    // Write the headers or virtual headers\n    for (var [header, value] of Object.entries(data)) {\n        header = header.toLowerCase()\n\n        // A header set to undefined acts like it wasn't set\n        if (value === undefined)\n            continue\n\n        // Version and Parents get output in the Structured Headers format,\n        // so we convert `value` from array to comma-separated strings.\n        if (header === 'version') {\n            header = 'Version'               // Capitalize for prettiness\n            value = value.map(JSON.stringify).join(\", \")\n        } else if (header === 'parents') {\n            header = 'Parents'               // Capitalize for prettiness\n            value = value.map(JSON.stringify).join(\", \")\n        }\n\n        // We don't output patches or body yet\n        else if (header === 'patches' 
|| header === 'body' || header === 'patch')\n            continue\n\n        set_header(header, value)\n    }\n\n    // Write the patches or body\n    if (typeof body === 'string') {\n        set_header('Content-Length', (new TextEncoder().encode(body)).length)\n        write_body(body)\n    } else\n        res.write(generate_patches(res, patches))\n\n    // Add a newline to prepare for the next version\n    // See also https://github.com/braid-org/braid-spec/issues/73\n    if (res.isSubscription) {\n        var extra_newlines = 1\n        if (res.is_firefox)\n            // Work around Firefox network buffering bug\n            // See https://github.com/braid-org/braidjs/issues/15\n            extra_newlines = 240\n\n        for (var i = 0; i < 1 + extra_newlines; i++)\n            res.write(\"\\r\\n\")\n    }\n}\n\n// a parsing utility function that will inspect a byte array of incoming data\n// to see if there is header information at the beginning,\n// namely some non-newline characters followed by two newlines\nfunction extractHeader(input) {\n    // Find the start of the headers\n    let begin_headers_i = 0;\n    while (input[begin_headers_i] === 13 || input[begin_headers_i] === 10) {\n        begin_headers_i++;\n    }\n    if (begin_headers_i === input.length) {\n        return null; // Incomplete headers\n    }\n\n    // Look for the double-newline at the end of the headers\n    let end_headers_i = begin_headers_i;\n    let size_of_tail = 0;\n    while (end_headers_i < input.length) {\n        if (input[end_headers_i] === 10 && input[end_headers_i + 1] === 10) {\n            size_of_tail = 2;\n            break;\n        }\n        if (input[end_headers_i] === 10 && input[end_headers_i + 1] === 13 && input[end_headers_i + 2] === 10) {\n            size_of_tail = 3;\n            break;\n        }\n        if (input[end_headers_i] === 13 && input[end_headers_i + 1] === 10 && input[end_headers_i + 2] === 10) {\n            size_of_tail = 3;\n            
break;\n        }\n        if (input[end_headers_i] === 13 && input[end_headers_i + 1] === 10 && input[end_headers_i + 2] === 13 && input[end_headers_i + 3] === 10) {\n            size_of_tail = 4;\n            break;\n        }\n\n        end_headers_i++;\n    }\n\n    // If no double-newline is found, wait for more input\n    if (end_headers_i === input.length) {\n        return null; // Incomplete headers\n    }\n\n    // Extract the header string\n    const headerBytes = input.slice(begin_headers_i, end_headers_i);\n    const headerString = new TextDecoder('utf-8').decode(new Uint8Array(headerBytes));\n\n    // Return the remaining bytes and the header string\n    const remainingBytes = input.slice(end_headers_i + size_of_tail);\n    return {\n        remaining_bytes: remainingBytes,\n        header_string: headerString\n    };\n}\n\nmodule.exports = braidify\n"
  },
  {
    "path": "braid-http/contributing.md",
    "content": "# Contributing to Braid-HTTP\n\nThis is core code, and I'd like it to meet everyone's needs!  I welcome\nsuggestions and improvements on all aspects, including style, features, API\ndesign, and algorithm design.  I'd like to refine this code into a great\nreference library.\n\nI also promise to fix bugs and generally help you get your needs met.  Just\nlet me know what you want!\n\n—Michael"
  },
  {
    "path": "braid-http/demos/blog/README",
    "content": "This is a demo blog / chat.\n\nTo run the demo:\n\n    git clone https://github.com/braid-work/braidjs.git\n    cd braidjs/demos/blog\n    npm install express http2-express-bridge\n    node server.js\n\nThen open https://localhost:3009/ in your browser, and then click through the\nself-signed certificate warnings to \"proceed anyway\"."
  },
  {
    "path": "braid-http/demos/blog/certificate",
    "content": "-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJALgm2/aRZmh6MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\naWRnaXRzIFB0eSBMdGQwHhcNMjAwOTEzMDU1NDI1WhcNMjEwOTEzMDU1NDI1WjBF\nMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\nZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\nCgKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8ZeFfIXpKI+\nJRLObDB7Me/8y4bgGSUQAPEowi75gk/jShvEhoMoV3G4B1ERmJ2xe02hUY3AiRuT\nQj7Z26y6C1/zJ+sCyPbeHBOyuCo341qjTYZMKKWfKDX0CGex+pLQupSYXsSq4NPY\naVOlvxoqw4FBprxNXyjIs2PgX3LoGMNYVuBT4/F1B+Jwn62HNJdjGNFMW1272kC7\n/NAd+V0cIcvDWojpX3DQXCOty+EVc58grjIfSi1N4Gq9OsjM49BV3neNknHSlxZ1\nmNbthBCMc/7xWD0AVZhYdrquhDsKdQIDAQABo1AwTjAdBgNVHQ4EFgQUxcq1RR+P\ndDFTGHLBdHU4s5WmYaIwHwYDVR0jBBgwFoAUxcq1RR+PdDFTGHLBdHU4s5WmYaIw\nDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAgp03sPUbQO73H4iZq87h\nu+zDCiVjB88KTrXvkB4jQ7nLaGOwicMQKDp/fDl19HrF4gUdRZIRadPGxMS25yNz\nCOQ+kRy/wVLGU6G6XfCjkhF/zuI+X6Bgku+trYqaL/bZsIe3VHLnUdpGetrDR0yO\nn6YyGPkPm0uYOF8apt2/BZ4jRNACAThnKMf+0cyTi5xI3sz0E84adLcU5CIV3fcq\nxeREOP7QUnnXR/isR39HgUAtJ1JGKB/KK7PbUxzqD5iTGZU9KGLxNw+zDmnJA5BQ\nMdF3u583S7W7xLsZf6rAx3FQBUJ5puyC+qMeU/JhS0rzBR/auUXGpkBU2ZPb846H\nzg==\n-----END CERTIFICATE-----"
  },
  {
    "path": "braid-http/demos/blog/client.html",
    "content": "<script type=coffee>\n# ##########################\n# UI written in coffeescript\n# \n\nfontFamily = 'avenext, avenir, sans'\n\ndom.BODY = ->\n  window.send = ->\n    # Reset the input box\n    new_message = state['new-message']\n    state['new-message'] = ''\n\n    # Save the post\n    post_id = Math.random().toString(36).substr(2)\n    state['/post/' + post_id] = {body: new_message}\n\n    # Add it to the blog\n    blog = state['/blog']\n    blog.push({link: '/post/' + post_id})\n\n  DIV\n    fontFamily: fontFamily\n    fontSize: 14\n\n    H1 \"it's a blog!\"\n    for post in state['/blog']\n      POST(url: post.link)\n    DIV\n      padding: 5\n      AUTOSIZEBOX\n        id: 'new-message'\n        fontFamily: fontFamily\n        width: 'calc(100% - 50px)'\n        padding: 5\n        backgroundColor: '#eee'\n        value: state['new-message']\n        onChange: (e) ->\n          state['new-message'] = e.target.value\n        onKeyDown: (e) ->\n          if (e.keyCode == 13 and not (e.ctrlKey or e.shiftKey))\n            e.preventDefault()\n            e.stopPropagation()\n            send()\n      BUTTON\n        position: 'relative'\n        bottom: 9\n        height: 27\n        width: 48\n        onClick: send\n        'Send'\n\n# Auto-focus the message input box\ndom.BODY.refresh = ->\n  document.getElementById('new-message')?.focus()\n\n\ndom.POST = (url) ->\n  post = state[url]\n  DIV\n    backgroundColor: '#eee'\n    padding: 5\n    margin: 5\n    DIV p for p in post.body.split('\\n')\n  \n\n## An auto-resizing <textarea>\ndom.AUTOSIZEBOX = ->\n  props = bus.clone(@props)\n  props.ref = 'textbox'\n  props.rows = 1\n  delete props['data-widget']\n  delete props['data-key']\n  TEXTAREA(props)\n\ndom.AUTOSIZEBOX.up = ->\n  target = @refs.textbox.getDOMNode()\n  resizebox(target)\n\ndom.AUTOSIZEBOX.refresh = ->\n  target = @refs.textbox.getDOMNode()\n  resizebox(target)\n\nresizebox = (target) ->\n  while (target.rows > 1 && 
target.scrollHeight < target.offsetHeight)\n    target.rows--\n  while (target.scrollHeight > target.offsetHeight && target.rows < 10000)\n    target.rows++\n\n</script>\n<script>\nvar peerid = Math.random().toString(36).substr(3)\n\n// Connect the Braid Blog as a state backend to Statebus\nvar curr_version = {}\n\nwindow.statebus_ready = [() => {\n    // This is how we fetch the /blog state\n    bus('/*').getter = (key, old, t) => {\n        function subscribe () {\n            // console.log('Subscribing to', {key, url},\n            //             'exists?', bus.gets_out[key].length)\n            // if (!bus.gets_out[key]) return\n            braid_fetch(key, {subscribe: true, peer: peerid}).then(\n                response => response.subscribe(\n                    update => {\n                        console.log('We got an update!', update)\n                        console.assert(\n                            !curr_version[key] || (parseInt(curr_version[key][0]) + 1\n                                                   === parseInt(response.version[0])),\n                            'Bad versions:' + JSON.stringify(\n                                {curr_version, res_ver:response.version}\n                            )\n                        )\n\n                        curr_version[key] = response.version\n                        if (update.patches)\n                            t.return(apply_patches(update.patches, old))\n                        else\n                            t.return({key: key, val: bus.escape_to_nelson(JSON.parse(update.body))})\n                    },\n                    e => setTimeout(subscribe, 1000)\n                )\n            ).catch(e => setTimeout(subscribe, 1000))\n        }\n        subscribe()\n    }\n\n    // This is how we save the /blog state\n    bus('/*').setter = (obj, t) => {\n        curr_version++\n        if (obj.key === '/blog')\n            var patches = [{\n                unit: 'json',\n                
range: '[-0:-0]',\n                content: JSON.stringify(\n                  bus.unescape_from_nelson(bus.unescape_from_bus(obj.val[obj.val.length-1]))\n                )\n            }]\n        else\n            var body = JSON.stringify(obj.val)\n\n        braid_fetch(obj.key, {\n            method: 'PUT',\n            peer: peerid,\n            headers: {'Content-Type': 'application/json'},\n            patches,\n            body\n        }).then(res => {\n            if (res.status === 200)\n                console.debug('braid_put complete')\n            else\n                console.debug('braid_put failed with', res.status)\n        }).catch(err => console.error('braid_put error', err))\n        t.done()\n    }\n}]\n\n// This is how we apply a braid patch to our blog state\nfunction apply_patches (patches, object) {\n    for (patch of patches)\n        // There are only two types of patches we could receive\n        if (patch.range === '')\n            // The entire blog in one patch\n            return {val: bus.escape_to_nelson(JSON.parse(patch.content))}\n        else\n            // A single new message, appended to the end\n            object.val.push(bus.escape_to_nelson(JSON.parse(patch.content)))\n    return object\n}\n</script>\n\n<!-- Include React v12 -->\n<script src=\"https://stateb.us/code/statebus/extras/react.min.js\"></script>\n\n<!-- Include Statebus and Braid-HTTP -->\n<script src=\"https://cdn.jsdelivr.net/npm/statebus@7.0.28/extras/coffee.js\"></script>\n<script src=\"https://cdn.jsdelivr.net/npm/statebus@7.0.28/statebus.js\"></script>\n<script src=\"https://cdn.jsdelivr.net/npm/statebus@7.0.28/client-library.js\"></script>\n<script src=\"braid-http-client.js\"></script>\n\n<!-- Configure statebus -->\n<script>\n  bus.libs.react12.coffreact()\n  window.state = bus.state\n  //bus.libs.http_out('/*', '/')\n</script>\n"
  },
  {
    "path": "braid-http/demos/blog/package.json",
    "content": "{\n  \"dependencies\": {\n    \"express\": \"^4.19.2\",\n    \"http2-express-bridge\": \"^1.0.7\"\n  }\n}"
  },
  {
    "path": "braid-http/demos/blog/private-key",
    "content": "-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8Ze\nFfIXpKI+JRLObDB7Me/8y4bgGSUQAPEowi75gk/jShvEhoMoV3G4B1ERmJ2xe02h\nUY3AiRuTQj7Z26y6C1/zJ+sCyPbeHBOyuCo341qjTYZMKKWfKDX0CGex+pLQupSY\nXsSq4NPYaVOlvxoqw4FBprxNXyjIs2PgX3LoGMNYVuBT4/F1B+Jwn62HNJdjGNFM\nW1272kC7/NAd+V0cIcvDWojpX3DQXCOty+EVc58grjIfSi1N4Gq9OsjM49BV3neN\nknHSlxZ1mNbthBCMc/7xWD0AVZhYdrquhDsKdQIDAQABAoIBAQCwg+S8mdPR42c0\nBn1//ItxiyJoaumMQvqLkXmQH8SNRibVFd5s7TZwSpquLnY4F53W0No4XsNgfaYP\nOPc4nbihir/oCUX2H9VvCPvnyFE9kphQL7rAcRddtOK6oOVZAqQLYPC+OdiZgbRm\nwtDFAEYvO+NKDqjf7ksCJONJ4pv13bczI7UDFhB/js1AhoLw+f65frdUw0smDNeO\n0wAWlEMym0fXFMIil0VGq5EMRsc1bGlDmYpjZfwD5VPacpq/BjmdUO2IHRYH24G1\nmPXQgphEgHYK4zgOltPkXDhVQzg79oCb9jwKCLoxHDeAtRMGKRohjjY7g6+Tcbok\n86L0TxABAoGBAOl8HQFL+rna/Edm6jWW7+FbxVFnCTv4UC35XGA+suc0s8glLCeV\nJ3PM/VrvTqGNjvmpjmUMJuHvpGR879VuyKcjnq2VlPax3lIN5t8M4dMe02jX3E8q\nGoREde/ElUv78/IdopNNV82KgLYVsCCfdYv7YpUr1r6JA5Mi1KraNbQFAoGBANFG\nu9A4DBbG/nCBYdqzqsLVxKctnDnGtgM5/BVxfe9KYtj4RgsYE12OOD7QKy6oKp0i\nef3CcJGP6QOy8NGocm1GCeWLmULPJ2dBa2QP22dWCv9tEjf9rc9OcH3GxwuWS4I4\n5V2/z2Q6/JI+1z2jvkdAe4zKkQ6cDUeHm6QR4rexAoGARFTaeEKwQixgoNTxvnVK\nBv5ApS4ueaqWbJ4J9vDikt/NrcmTPpJcVXusuixHKuiu60pALjp8NqtXxUD7P8+I\nUcIO8mZQjlJH6mO/KZAvlwXygUCLbW+5CvSuP1mdB/vYzQb3SXzdMX5TZPa78RA5\n6pnLpDSMSH742NrcupPaG7ECgYB7/AcLPhlTaUOXDeAWfPk/AvzF+syioip1UOYb\nIij1GsHjP3vn2LSoabjC5fufYbwVajaR859TbcokCeCIpd5dBPCaERGUtjvm/agN\nGSPoXd6YI1t21pEhz5vvuFN2du13UhP5bckF3biDQD2u5BK4DejkqaI04JkxdpLs\nKjiCkQKBgBV0DAhG+mOsMegv1LkK5Sg1XQt+zG8c11ESVPISEpLvaBZjVxVLdrKs\nR5ohNxC25kVbaxvfOTP4GWrJ1l8u7HKPVdqfY2j68JV3qyIZJ40a8HdEm5mr24HA\nfKQXk8BRRZb8OWrb6O1jeTuYkYvzwUGtqF7UzqY440YHxFED3V9j\n-----END RSA PRIVATE KEY-----\n"
  },
  {
    "path": "braid-http/demos/blog/server.js",
    "content": "assert = require('assert')\n\n// Blog Data\nvar resources = {\n    '/blog': [\n        {link: '/post/1'},\n        {link: '/post/2'},\n        {link: '/post/3'}\n    ],\n    '/post/1': {body: 'First post OMGGG!!!!'},\n    '/post/2': {body: `Once upon a time,\nI ate a big fish.\nIt was really tasty.`},\n    '/post/3': {body: \"It's nice when things come in threes.\"}\n}\nvar curr_version = () => [ resources['/blog'].length + '' ]\n\n\n\n// Subscription data\nvar subscriptions = {}\nvar rhash = (req) => JSON.stringify([req.headers.peer, req.url])\n\n\n// Create our HTTP bindings!\nvar braidify = require('../../index.js').http_server\nvar app = require('http2-express-bridge')(require('express'))\n\n// Middleware\napp.use(log_request)\napp.use(free_the_cors)\napp.use(braidify)\n\n// HTTP Routes\nfunction getter (req, res) {\n    // Make sure URL is valid\n    if (!(req.url in resources)) {\n        res.statusCode = 404\n        res.end()\n        return\n    }\n\n    // Honor any subscription request\n    if (req.subscribe) {\n        res.startSubscription({ onClose: _=> delete subscriptions[rhash(req)] })\n        subscriptions[rhash(req)] = res\n    } else\n        res.statusCode = 200\n\n    // Send the current version\n    res.sendUpdate({\n        version: curr_version(),\n        body: JSON.stringify(resources[req.url])\n    })\n\n    if (!req.subscribe)\n        res.end()\n}\napp.get('/blog',     getter)\napp.get('/post/:id', getter)\n\napp.put('/blog', async (req, res) => {\n    var patches = (await req.parseUpdate()).patches\n\n    console.log('Extending /blog with!', patches)\n    // assert(patches.length === 1)\n    // assert(patches[0].range === '[-0:-0]')\n\n    resources['/blog'].push(JSON.parse(patches[0].content))\n\n    for (var k in subscriptions) {\n        var [peer, url] = JSON.parse(k)\n        if (peer !== req.headers.peer && url === req.url)\n            subscriptions[k].sendUpdate({\n                version: curr_version(),\n    
            patches\n            })\n    }\n\n    res.statusCode = 200\n    res.end()\n})\napp.put('/post/:id', async (req, res) => {\n    var update = await req.parseUpdate()\n\n    console.log('Setting', req.url, 'with', update)\n    assert(typeof update.body === 'string')\n\n    resources[req.url] = JSON.parse(update.body)\n\n    for (var k in subscriptions) {\n        var [peer, url] = JSON.parse(k)\n        if (peer !== req.headers.peer && url === req.url)\n            subscriptions[k].sendUpdate({\n                version: curr_version(),\n                body: update.body\n            })\n    }\n\n    res.end()\n})\n\n// Now serve the HTML and client files\nsendfile = (f) => (req, res) => res.sendFile(f, {root:'../..'})\napp.get('/',                     sendfile('demos/blog/client.html'));\napp.get('/braid-http-client.js', sendfile('braid-http-client.js'))\n\n\n// Define Middleware\nfunction log_request (req, res, next) {\n    console.log(req.method, req.url)\n    next()\n}\nfunction free_the_cors (req, res, next) {\n    res.setHeader('Range-Request-Allow-Methods', 'PATCH, PUT')\n    res.setHeader('Range-Request-Allow-Units', 'json')\n    res.setHeader(\"Patches\", \"OK\")\n    var free_the_cors = {\n        \"Access-Control-Allow-Origin\": \"*\",\n        \"Access-Control-Allow-Methods\": \"OPTIONS, HEAD, GET, PUT, UNSUBSCRIBE\",\n        \"Access-Control-Allow-Headers\": \"subscribe, peer, version, parents, merge-type, content-type, patches, cache-control\"\n    }\n    Object.entries(free_the_cors).forEach(x => res.setHeader(x[0], x[1]))\n    if (req.method === 'OPTIONS') {\n        res.writeHead(200)\n        res.end()\n    } else\n        next()\n}\n\n\n// Launch the https server\nvar server = require('http2').createSecureServer(\n    {\n        cert:       require('fs').readFileSync('./certificate'),\n        key:        require('fs').readFileSync('./private-key'),\n        allowHTTP1: true\n    },\n    app\n)\n// server.setTimeout(0, x => 
console.log('Server timeout!', x))\n// console.log('Server timeouts:', server.timeout, server.keepAliveTimeout)\nserver.listen(3009, _=> console.log('listening on port 3009...'))\n"
  },
  {
    "path": "braid-http/demos/chat/README",
    "content": "To run the braidjs chat demo:\n\n    git clone https://github.com/braid-work/braidjs.git\n    cd braidjs/demos/chat\n    npm install express spdy parse-headers\n    node server.js\n\nThen open braidjs/demos/client.html in your browser.  It won't work yet\nbecause it doesn't trust the self-signed certificate.  To get around this,\nopen https://localhost:3009/ in your browser, and then:\n\n  - In Chrome, type the magic phrase \"thisisunsafe\" into the page\n  - In Firefox, click \"advanced\" and \"accept and continue\"\n  - In Safari, click through the warnings to \"proceed anyway\"\n"
  },
  {
    "path": "braid-http/demos/chat/certificate",
    "content": "-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJALgm2/aRZmh6MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\naWRnaXRzIFB0eSBMdGQwHhcNMjAwOTEzMDU1NDI1WhcNMjEwOTEzMDU1NDI1WjBF\nMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\nZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\nCgKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8ZeFfIXpKI+\nJRLObDB7Me/8y4bgGSUQAPEowi75gk/jShvEhoMoV3G4B1ERmJ2xe02hUY3AiRuT\nQj7Z26y6C1/zJ+sCyPbeHBOyuCo341qjTYZMKKWfKDX0CGex+pLQupSYXsSq4NPY\naVOlvxoqw4FBprxNXyjIs2PgX3LoGMNYVuBT4/F1B+Jwn62HNJdjGNFMW1272kC7\n/NAd+V0cIcvDWojpX3DQXCOty+EVc58grjIfSi1N4Gq9OsjM49BV3neNknHSlxZ1\nmNbthBCMc/7xWD0AVZhYdrquhDsKdQIDAQABo1AwTjAdBgNVHQ4EFgQUxcq1RR+P\ndDFTGHLBdHU4s5WmYaIwHwYDVR0jBBgwFoAUxcq1RR+PdDFTGHLBdHU4s5WmYaIw\nDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAgp03sPUbQO73H4iZq87h\nu+zDCiVjB88KTrXvkB4jQ7nLaGOwicMQKDp/fDl19HrF4gUdRZIRadPGxMS25yNz\nCOQ+kRy/wVLGU6G6XfCjkhF/zuI+X6Bgku+trYqaL/bZsIe3VHLnUdpGetrDR0yO\nn6YyGPkPm0uYOF8apt2/BZ4jRNACAThnKMf+0cyTi5xI3sz0E84adLcU5CIV3fcq\nxeREOP7QUnnXR/isR39HgUAtJ1JGKB/KK7PbUxzqD5iTGZU9KGLxNw+zDmnJA5BQ\nMdF3u583S7W7xLsZf6rAx3FQBUJ5puyC+qMeU/JhS0rzBR/auUXGpkBU2ZPb846H\nzg==\n-----END CERTIFICATE-----"
  },
  {
    "path": "braid-http/demos/chat/client.html",
    "content": "<script src=\"braid-http-client.js\"></script>\n<script type=\"module\">\n  // Imports\n  import { h, Component, render } from 'https://unpkg.com/preact?module'\n  import htm from 'https://unpkg.com/htm?module'\n  var html = htm.bind(h)\n\n  // Styles\n  var font = 'avenext, avenir, sans'\n  var fonty = {fontFamily: font}\n\n  // Widgets\n  var Chat = (chat) => html`\n      <div style=${fonty}>\n        <h1>It's a chat!</h1>\n        ${chat.map(post => Post(post))}\n        <input type=text id=new_stuff onkeydown=${hit_a_key}/>\n        <button onclick=${send_message}>go</button>\n      </div>`\n\n  var Post = (post) => html`\n      <div style=\"background-color: #eee; padding: 5; margin: 5;\">\n        ${post.text}\n      </div>`\n\n  // Actions\n  var hit_a_key = (e) => {\n      if (e.keyCode === 13 && !(e.ctrlKey || e.shiftKey)) {\n          e.preventDefault()\n          e.stopPropagation()\n          send_message()\n      }\n  }\n\n  // State\n  var chat = []\n  var curr_version = {}\n\n  // Render\n  var render_root = () => render(Chat(chat), document.body)\n  render_root()\n\n  var send_message = async () => {\n      // Update the text input\n      var input = document.getElementById('new_stuff'),\n          post = {text: input.value}\n      input.value = ''\n\n      // Update local state\n      chat.push(post)\n      curr_version['/chat'] = [(parseInt(curr_version['/chat'][0]) + 1) + '']\n\n      // Re-render UI\n      render_root()\n\n      // Send patch over the network\n      var patches = [{unit: 'json', range: '[-0:-0]', content: JSON.stringify(post)}]\n      var res = await braid_fetch(url, {method: 'put', patches, peer})\n      if (res.status === 200)\n          console.debug('put complete')\n      else\n          console.debug('put failed with', res.status)\n  }\n\n  // Networking\n  var path = '/chat',\n      url = new URL(path, window.location.href),\n      peer = Math.random().toString(36).substr(2)\n\n  function connect () {\n     
 braid_fetch(url, {subscribe: true, peer}).then(\n        response => response.subscribe(\n          update => {\n              console.log('Got update!', update)\n\n              curr_version[path] = update.version\n\n              // When we receive updates, they might come in the form of patches:\n              if (update.patches)\n                  chat = apply_patches(update.patches, chat)\n\n              // Or a complete version:\n              else\n                  // Beware the server doesn't send these yet.\n                  chat = JSON.parse(update.body)\n\n              render_root()\n          },\n          e => setTimeout(connect, 2000)\n        )\n      ).catch(e => setTimeout(connect, 2000))\n  }\n\n  connect()\n\n  // // The for await version is not currently used\n  // async function connect2 () {\n  //     try {\n  //         for await (var update of\n  //                    braid_fetch(url, {subscribe: true}, peer).subscription) {\n  //             curr_version[path] = update.version\n\n  //             // When we receive updates, they might come in the form of patches:\n  //             if (update.patches)\n  //                 chat = apply_patches(update.patches, chat)\n\n  //             // Or a complete version:\n  //             else\n  //                 // Beware the server doesn't send these yet.\n  //                 chat = JSON.parse(update.body)\n\n  //             render_root()\n  //         }\n  //     } catch (e) {\n  //         console.log('Reconnecting...')\n  //         setTimeout(connect2, 2000)\n  //     }\n  // }\n\n  function apply_patches (patches, object) {\n      for (var patch of patches)\n          // There are only two types of patches we could receive\n          if (patch.range === '')\n              // The entire chat in one patch\n              return JSON.parse(patch.content)\n          else\n              // A single new message, appended to the end\n              object.push(JSON.parse(patch.content))\n      
return object\n  }\n\n</script>\n"
  },
  {
    "path": "braid-http/demos/chat/package.json",
    "content": "{\n  \"dependencies\": {\n    \"express\": \"^4.19.2\",\n    \"http2-express-bridge\": \"^1.0.7\"\n  }\n}"
  },
  {
    "path": "braid-http/demos/chat/private-key",
    "content": "-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAvt7W6Y6IKMhIi/PGLWPMj+jWC/Ne2P2Yhqx9kuUN+hkXF8Ze\nFfIXpKI+JRLObDB7Me/8y4bgGSUQAPEowi75gk/jShvEhoMoV3G4B1ERmJ2xe02h\nUY3AiRuTQj7Z26y6C1/zJ+sCyPbeHBOyuCo341qjTYZMKKWfKDX0CGex+pLQupSY\nXsSq4NPYaVOlvxoqw4FBprxNXyjIs2PgX3LoGMNYVuBT4/F1B+Jwn62HNJdjGNFM\nW1272kC7/NAd+V0cIcvDWojpX3DQXCOty+EVc58grjIfSi1N4Gq9OsjM49BV3neN\nknHSlxZ1mNbthBCMc/7xWD0AVZhYdrquhDsKdQIDAQABAoIBAQCwg+S8mdPR42c0\nBn1//ItxiyJoaumMQvqLkXmQH8SNRibVFd5s7TZwSpquLnY4F53W0No4XsNgfaYP\nOPc4nbihir/oCUX2H9VvCPvnyFE9kphQL7rAcRddtOK6oOVZAqQLYPC+OdiZgbRm\nwtDFAEYvO+NKDqjf7ksCJONJ4pv13bczI7UDFhB/js1AhoLw+f65frdUw0smDNeO\n0wAWlEMym0fXFMIil0VGq5EMRsc1bGlDmYpjZfwD5VPacpq/BjmdUO2IHRYH24G1\nmPXQgphEgHYK4zgOltPkXDhVQzg79oCb9jwKCLoxHDeAtRMGKRohjjY7g6+Tcbok\n86L0TxABAoGBAOl8HQFL+rna/Edm6jWW7+FbxVFnCTv4UC35XGA+suc0s8glLCeV\nJ3PM/VrvTqGNjvmpjmUMJuHvpGR879VuyKcjnq2VlPax3lIN5t8M4dMe02jX3E8q\nGoREde/ElUv78/IdopNNV82KgLYVsCCfdYv7YpUr1r6JA5Mi1KraNbQFAoGBANFG\nu9A4DBbG/nCBYdqzqsLVxKctnDnGtgM5/BVxfe9KYtj4RgsYE12OOD7QKy6oKp0i\nef3CcJGP6QOy8NGocm1GCeWLmULPJ2dBa2QP22dWCv9tEjf9rc9OcH3GxwuWS4I4\n5V2/z2Q6/JI+1z2jvkdAe4zKkQ6cDUeHm6QR4rexAoGARFTaeEKwQixgoNTxvnVK\nBv5ApS4ueaqWbJ4J9vDikt/NrcmTPpJcVXusuixHKuiu60pALjp8NqtXxUD7P8+I\nUcIO8mZQjlJH6mO/KZAvlwXygUCLbW+5CvSuP1mdB/vYzQb3SXzdMX5TZPa78RA5\n6pnLpDSMSH742NrcupPaG7ECgYB7/AcLPhlTaUOXDeAWfPk/AvzF+syioip1UOYb\nIij1GsHjP3vn2LSoabjC5fufYbwVajaR859TbcokCeCIpd5dBPCaERGUtjvm/agN\nGSPoXd6YI1t21pEhz5vvuFN2du13UhP5bckF3biDQD2u5BK4DejkqaI04JkxdpLs\nKjiCkQKBgBV0DAhG+mOsMegv1LkK5Sg1XQt+zG8c11ESVPISEpLvaBZjVxVLdrKs\nR5ohNxC25kVbaxvfOTP4GWrJ1l8u7HKPVdqfY2j68JV3qyIZJ40a8HdEm5mr24HA\nfKQXk8BRRZb8OWrb6O1jeTuYkYvzwUGtqF7UzqY440YHxFED3V9j\n-----END RSA PRIVATE KEY-----\n"
  },
  {
    "path": "braid-http/demos/chat/server.js",
    "content": "var assert = require('assert')\n\n// Chat Data\nvar resources = {\n    '/chat': [\n        {text: 'Hello!'},\n        {text: 'This is a post!'},\n        {text: 'This is a post-modern!'}\n    ]\n}\nvar chat_version = () => [resources['/chat'].length.toString()]\nvar post_versions = {}\n\n// Subscription data\nvar subscriptions = {}\nvar subscription_hash = (req) => JSON.stringify([req.headers.peer, req.url])\n\n// Create our HTTP bindings!\n//var braidify = require('../../braid-http-server')\nvar braidify = require('../../index.js').http_server\nvar app = require('http2-express-bridge')(require('express'))\n\n// Middleware\napp.use(free_the_cors)\napp.use(braidify)\n\napp.get('/chat', (req, res) => {\n    console.log('get for peer', req.headers.peer)\n    // Honor any subscription request\n    if (req.subscribe) {     // Using the new subscription feature braidify is adding to req & res\n        res.startSubscription({ onClose: _=> delete subscriptions[subscription_hash(req)] })\n        subscriptions[subscription_hash(req)] = res\n        console.log('We are subscribing at hash', subscription_hash(req))\n    } else {\n        res.statusCode = 200\n    }\n\n    // Send the current version\n    res.sendUpdate({\n        version: chat_version(),\n        body: JSON.stringify(resources['/chat'])\n    })\n\n    if (!req.subscribe)\n        res.end()\n})\n\napp.put('/chat', async (req, res) => {\n    var patches = await req.patches()  // Braidify adds .patches() to request objects\n\n    // Bug: Should return error code (40x?) 
for invalid request instead of crashing\n    assert(patches.length === 1)\n    assert(patches[0].range === '[-0:-0]')\n    assert(patches[0].unit === 'json')\n\n    resources['/chat'].push(JSON.parse(patches[0].content))\n\n    // Now send the data to all subscribers\n    for (var k in subscriptions) {\n        var [peer, url] = JSON.parse(k)\n        if (url === req.url  // Send only to subscribers of this URL\n            && peer !== req.headers.peer)  // Skip the peer that sent this PUT\n\n            subscriptions[k].sendUpdate({\n                version: chat_version(),\n                patches\n            })\n    }\n    \n    res.statusCode = 200\n    res.end()\n})\n\n// Now serve the HTML and client files\nvar sendfile = (f) => (req, res) => res.sendFile(require('path').join(__dirname, f))\napp.get('/',                   sendfile('client.html'));\napp.get('/braid-http-client.js', sendfile('../../braid-http-client.js'))\n\n// Free the CORS!\nfunction free_the_cors (req, res, next) {\n    console.log('free the cors!', req.method, req.url)\n\n    // Hey... these headers aren't about CORS!  
Let's move them into the braid\n    // libraries:\n    res.setHeader('Range-Request-Allow-Methods', 'PATCH, PUT')\n    res.setHeader('Range-Request-Allow-Units', 'json')\n    res.setHeader(\"Patches\", \"OK\")\n    // ^^ Actually, it looks like we're going to delete these soon.\n\n    var free_the_cors = {\n        \"Access-Control-Allow-Origin\": \"*\",\n        \"Access-Control-Allow-Methods\": \"OPTIONS, HEAD, GET, PUT, UNSUBSCRIBE\",\n        \"Access-Control-Allow-Headers\": \"subscribe, peer, version, parents, merge-type, content-type, patches, cache-control\"\n    }\n    Object.entries(free_the_cors).forEach(x => res.setHeader(x[0], x[1]))\n    if (req.method === 'OPTIONS') {\n        res.writeHead(200)\n        res.end()\n    } else\n        next()\n}\n\n// Launch the https server\nvar server = require('http2').createSecureServer(\n    {\n        cert:       require('fs').readFileSync('./certificate'),\n        key:        require('fs').readFileSync('./private-key'),\n        allowHTTP1: true\n    },\n    app\n)\n// server.setTimeout(0, x => console.log('Server timeout!', x))\n// console.log('Server timeouts:', server.timeout, server.keepAliveTimeout)\nserver.listen(3009, _=> console.log('listening on port 3009...'))\n"
  },
  {
    "path": "braid-http/index.js",
    "content": "// This is the root file for require('braid-http').\n//\n// It combines the client and server files into one file.\n\nvar client = require('./braid-http-client'),\n    server = require('./braid-http-server')\n\nmodule.exports = {\n    fetch: client.fetch,\n    http_client: client.http,\n    http_server: server\n}\n"
  },
  {
    "path": "braid-http/index.mjs",
    "content": "// This is the root file for es modules:\n//\n//    import {fetch, http} from 'braid-http'\n//\n// This file combines the client and server files into one file.\n\nimport braid_client from './braid-http-client.js'\nimport braid_server from './braid-http-server.js'\n\nvar fetch = braid_client.fetch,\n    http_client = braid_client.http,\n    http_server = braid_server\n\nexport { fetch, http_client, http_server }\nexport default { fetch, http_client, http_server }\n"
  },
  {
    "path": "braid-http/package.json",
    "content": "{\n  \"name\": \"braid-http\",\n  \"version\": \"0.3.21\",\n  \"description\": \"An implementation of Braid-HTTP for Node.js and Browsers\",\n  \"scripts\": {\n    \"test\": \"node test/server.js\"\n  },\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org\",\n  \"files\": [\n    \"braid-http-client.js\",\n    \"braid-http-server.js\",\n    \"index.js\",\n    \"index.mjs\"\n  ],\n  \"main\": \"./index.js\",\n  \"exports\": {\n    \"require\": \"./index.js\",\n    \"import\": \"./index.mjs\"\n  },\n  \"browser\": {\n    \"node-web-streams\": false,\n    \"node-fetch\": false,\n    \"abort-controller\": false\n  },\n  \"dependencies\": {\n    \"abort-controller\": \"^3.0.0\",\n    \"node-fetch\": \"^2.6.1\",\n    \"parse-headers\": \"^2.0.3\",\n    \"web-streams-node\": \"^0.4.0\"\n  }\n}\n"
  },
  {
    "path": "braid-http/package.md",
    "content": "# package.json notes\n\nThis package is bundled as both a commonjs and es6-compatible NPM bundle. The\nfactor that enables this dual packaging is the \"exports\" key in the package.json\nfile:\n\n## exports\n\n- `require`: When this package is in a commonjs environment (e.g. default nodejs) \n  the ./index.js file will be the thing that is 'require'd.\n- `import`: When this package is in an es6 environment (e.g. bundler, modern nodejs,\n  modern browser) the ./index.mjs will be the thing 'import'ed.\n\n## dependencies\n\n\n  \n- `node-fetch`: When the http-client protocol is used, node-fetch supplies 'fetch'\n  for a nodejs client\n- `node-web-streams`: Although node-fetch is mostly isomorphic, its internal stream\n  is not the same as a web stream reader; we need it to have the same API.\n- `spdy`: This gives us http2.0 connection multiplexing with a 'natural http module\n  interface'. (http1.1 provides a max of 6 open conns)\n\n## Development Notes\n\nFor code that is intended to run in all environments (e.g. browser, node) and\npotentially pass through a bundler step, the following guidelines are helpful:\n\n- Use single-value module.exports in files, and named exports in wrappers.\n- If using globals, it's also important to use module.exports; for example: \n \n```\nfunction braid_fetch(...) { ... }\n\nif (typeof module !== 'undefined' && module.exports) {\n    module.exports = braid_fetch\n}\n```\n\nFor a complete list of reasons for the madness, and to learn more about the method\nwe've used to build this package, see https://redfin.engineering/node-modules-at-war-why-commonjs-and-es-modules-cant-get-along-9617135eeca1\n\nBecause we `require` certain libraries that are meant to be used in a nodejs environment only, we also need to provide a hint to bundlers that are targeting a browser environment NOT to load those libraries. 
This is what the `browser` field in `package.json` is for:\n\n```\n  \"browser\": {\n    \"node-web-streams\": false,\n    \"node-fetch\": false,\n    \"abort-controller\": false\n  }\n```\n\nIf we don't hint that these libraries should not be loaded in the browser, bundled code that depends on braidjs libraries will fail in the browser.\n\nSee also https://github.com/defunctzombie/package-browser-field-spec.\n"
  },
  {
    "path": "braid-http/readme.md",
    "content": "# NOTE: This project has moved to [braid-http](https://github.com/braid-org/braid-http)\n\n# Braid-HTTP\n\nThis polyfill library implements the [Braid-HTTP v04 protocol](https://github.com/braid-org/braid-spec/blob/master/draft-toomim-httpbis-braid-http-04.txt) in Javascript.  It gives browsers a `braid_fetch()` drop-in replacement for the `fetch()` API, and gives nodejs an `http` plugin, allowing them to speak Braid in a simple way.\n\nDeveloped in [braid.org](https://braid.org).\n\n\n## Installing\n\nBrowsers:\n\n```html\n<script src=\"https://unpkg.com/braid-http/braid-http-client.js\"></script>\n<script>\n  // To live on the cutting edge, you can now replace the browser's fetch() if desired:\n  // window.fetch = braid_fetch\n</script>\n```\n\nNode.js:\n\n```shell\nnpm install braid-http\n```\n\n```javascript\n// Import with require()\nrequire('braid-http').fetch       // A polyfill for require('node-fetch')\nrequire('braid-http').http_client // A polyfill for require('http') clients\nrequire('braid-http').http_server // A polyfill for require('http') servers\n\n// Or as es6 module\nimport {fetch, http_client, http_server} from 'braid-http'\n```\n\n## Using it in Browsers\n\nThis library adds a `{subscribe: true}` option to `fetch()`, and lets you\naccess the result of a subscription with two new fields on the fetch response:\n\n- `response.subscribe( update => ... 
)`\n- `response.subscription`: an iterator that can be used with `for await`\n\n### Example Subscription with Promises\n\nHere is an example of subscribing to a Braid resource using promises:\n\n```javascript\nfetch('https://braid.org/chat', {subscribe: true}).then(\n    res => res.subscribe(\n        (update) => {\n            console.log('We got a new update!', update)\n            // {\n            //   version: [\"me\"],\n            //   parents: [\"mom\", \"dad\"],\n            //   patches: [{unit: \"json\", range: \".foo\", content: \"3\"}]\n            //   body:    \"3\"\n            // }\n            //\n            // Note that `update` will contain either patches *or* body\n        }\n    )\n)\n```\n\nIf you want automatic reconnections, add two error handlers like this:\n\n```javascript\nfunction connect() {\n    fetch('https://braid.org/chat', {subscribe: true}).then(\n        res => res.subscribe(\n            (update) => {\n                console.log('We got a new update!', update)\n                // Do something with the update\n            },\n            e => setTimeout(connect, 1000)\n        )\n    ).catch(e => setTimeout(connect, 1000))\n}\nconnect()\n```\n\n### Example Subscription with Async/Await\n\n```javascript\nasync function connect () {\n    try {\n        (await fetch('/chat', {subscribe: true})).subscribe(\n            (update) => {\n                // We got a new update!\n            },\n            () => setTimeout(connect, 1000)\n        )\n    } catch (e) {\n        setTimeout(connect, 1000)\n    }\n}\n```\n\n### Example Subscription with `for await`\n\n```javascript\nasync function connect () {\n    try {\n        var subscription_iterator = fetch('/chat', {subscribe: true}).subscription\n        for await (var update of subscription_iterator) {\n            // Updates might come in the form of patches:\n            if (update.patches)\n                chat = apply_patches(update.patches, chat)\n\n            // Or complete 
snapshots:\n            else\n                // Beware the server doesn't send these yet.\n                chat = JSON.parse(update.body)\n\n            render_stuff()\n        }\n    } catch (e) {\n        console.log('Reconnecting...')\n        setTimeout(connect, 4000)\n    }\n}\n```\n\n## Using it in Nodejs\n\n### Example Nodejs server with `require('http')`\n\nBraidify adds these fields and methods to requests and responses:\n- `req.subscribe`\n- `req.startSubscription({onClose: cb})`\n- `await req.parseUpdate()`\n- `res.sendUpdate()`\n\nUse it like this:\n\n```javascript\nvar braidify = require('braid-http').http_server\n// or:\nimport {http_server as braidify} from 'braid-http'\n\nrequire('http').createServer(\n    (req, res) => {\n        // Add braid stuff to req and res\n        braidify(req, res)\n\n        // Now use it\n        if (req.subscribe)\n            res.startSubscription({ onClose: _=> null })\n            // startSubscription automatically sets statusCode = 209\n        else\n            res.statusCode = 200\n\n        // Send the current version\n        res.sendUpdate({\n            version: ['greg'],\n            body: JSON.stringify({greg: 'greg'})\n        })\n    }\n).listen(9935)\n```\n\n### Example Nodejs server with `require('express')`\n\nWith `express`, you can simply call `app.use(braidify)` to get braid features\nadded to every request and response.\n\n```javascript\nvar braidify = require('braid-http').http_server\n// or:\nimport {http_server as braidify} from 'braid-http'\n\nvar app = require('express')()\n\napp.use(braidify)    // Add braid stuff to req and res\n\napp.get('/', (req, res) => {\n    // Now use it\n    if (req.subscribe)\n        res.startSubscription({ onClose: _=> null })\n        // startSubscription automatically sets statusCode = 209\n    else\n        res.statusCode = 200\n\n    // Send the current version\n    res.sendUpdate({\n        version: ['greg'],\n        parents: ['gr','eg'],\n        body: 
JSON.stringify({greg: 'greg'})\n    })\n\n    // Or you can send patches like this:\n    // res.sendUpdate({\n    //     version: ['greg'],\n    //     parents: ['gr','eg'],\n    //     patches: [{range: '.greg', unit: 'json', content: '\"greg\"'}]\n    // })\n})\n\nrequire('http').createServer(app).listen(8583)\n```\n\n\n\n### Example Nodejs client with `require('http')`\n\n```javascript\n// Use this line if necessary for self-signed certs\n// process.env[\"NODE_TLS_REJECT_UNAUTHORIZED\"] = 0\n\nvar https = require('braid-http').http_client(require('https'))\n// or:\n// import braid_http from 'braid-http'\n// https = braid_http.http_client(require('https'))\n\nhttps.get(\n   'https://braid.org/chat',\n   {subscribe: true},\n   (res) => {\n      res.on('update', (update) => {\n          console.log('well we got one', update)\n      })\n   }\n)\n```\n\nTo get auto-reconnections use:\n\n```javascript\nfunction connect () {\n    https.get(\n        'https://braid.org/chat',\n        {subscribe: true},\n        (res) => {\n            res.on('update', (update) => {\n                // {\n                //   version: [\"me\"],\n                //   parents: [\"mom\", \"dad\"],\n                //   patches: [{unit: \"json\", range: \".foo\", content: \"3\"}]\n                //   body:    \"3\"\n                // }\n                //   // Update will contain either patches *or* body, but not both\n                console.log('We got a new update!', update)\n            })\n\n            res.on('end',   e => setTimeout(connect, 1000))\n            res.on('error', e => setTimeout(connect, 1000))\n        })\n}\nconnect()\n```\n\n\n### Example Nodejs client with `fetch()`\n\n```javascript\nvar fetch = require('braid-http').fetch\n// or:\nimport {fetch} from 'braid-http'\n\n// process.env[\"NODE_TLS_REJECT_UNAUTHORIZED\"] = 0\n\nfetch('https://localhost:3009/chat',\n      {subscribe: true}).andThen(\n          x => console.log('Got ', x)\n      )\n```\n\nNote: the 
current version of `node-fetch` doesn't properly throw errors when a\nresponse connection dies, and thus you cannot attach a `.catch()` handler to\nautomatically reconnect.  (See\n[issue #980](https://github.com/node-fetch/node-fetch/issues/980) and\n[#753](https://github.com/node-fetch/node-fetch/issues/753).)  We recommend\nusing the `http` library (below) for requests on nodejs instead.\n"
  },
  {
    "path": "braid-http/test/client.html",
    "content": "<script src=\"/braid-http-client.js\"></script>\n<script type=module>\nvar fetch = braid_fetch\n\n// Setup the tests\nwindow.test_results = ''\nfunction result (message) {\n    console.log(message)\n    test_results += message + '\\n'\n\n    // We test server restarts manually.\n    // If the main tests are complete:\n    if (document.getElementById('passfail')) {\n        // Then let's add our reconnection results\n        if (!document.getElementById('reconnections'))\n            document.writeln('<h2 id=reconnections>Reconnection Results</h2><pre>')\n\n        document.writeln(message)\n    }\n}\n\n\n// Reading data from the server\nfunction read_test1 () {\n    fetch('/json', {subscribe: true}).then(\n        res => res.subscribe(\n            update => result('Read 1 ' + JSON.stringify(update) + '!'),\n            (e) => {\n                result('Read 1 connection died')\n                console.log('Try again 1', e)\n                setTimeout(read_test1, 1000)\n            }\n        )).catch(e => {\n            result('Read 1 connection died')\n            console.log('Try again 1', e)\n            setTimeout(read_test1, 1000)\n        })\n}\n\n\nasync function read_test2 () {\n    try {\n        (await fetch('/json', {subscribe: true})).subscribe(\n            update => result('Read 2 ' + JSON.stringify(update) + '!'),\n            (e) => {\n                result('Read 2 connection died')\n                console.log('Try again 2', e)\n                setTimeout(read_test2, 1000)\n            }\n        )\n    } catch (e) {\n        result('Read 2 connection died')\n        console.log('Try again 2', e)\n        setTimeout(read_test2, 1000)\n    }\n}\n\nasync function read_test3 () {\n    try {\n        for await (var update of (await fetch('/json', {subscribe: true})).subscription) \n        result('Read 3 ' + JSON.stringify(update) + '!')\n    } catch (e) {\n        result('Read 3 connection died')\n        console.log('Try again 3', 
e)\n        setTimeout(read_test3, 1000)\n    }\n}\n\nsetTimeout(read_test1, 0)\nsetTimeout(read_test2, 10)\nsetTimeout(read_test3, 20)\n\n\n// Writing data to the server\nfunction write_test1 () {\n    fetch('/json', {\n        version: ['test1'],\n        patches: {unit: 'json', range: '[0]', content: '\"test1\"'},\n        method: 'PUT'\n    }).then(async res => {\n        result('Write test 1 returned ' + res.status)\n    })\n}\nfunction write_test2 () {\n    fetch('/json', {\n        version: ['test2'],\n        patches: [{unit: 'json', range: '[0]', content: '\"test2\"'}],\n        method: 'PUT'\n    }).then(async res => {\n        result('Write test 2 returned ' + res.status)\n    })\n}\n\nfunction write_test3 () {\n    fetch('/json', {\n        version: ['test3'],\n        patches: [\n            {unit: 'jsonpath', range: '[0]', content: '\"test3\"'},\n            {unit: 'jsonpath', range: '[1]', content: '\"test3\"'},\n            {unit: 'jsonpath', range: '[2]', content: '\"test3\"'}\n        ],\n        method: 'PUT'\n    }).then(async res => {\n        result('Write test 3 returned ' + res.status)\n    })\n}\n\nfunction write_test4 () {\n    fetch('/json', {\n        version: ['test4'],\n        patches: [],\n        method: 'PUT'\n    }).then(async res => {\n        result('Write test 4 returned '+ res.status)\n    })\n}\n\nsetTimeout(write_test1, 100)\nsetTimeout(write_test2, 110)\nsetTimeout(write_test3, 120)\nsetTimeout(write_test4, 130)\n\n\n// Evaluate all the tests\nfunction evaluate_tests () {\n    fetch('/test-responses.txt').then(res => res.text()).then(text => {\n        window.known_results = text\n        document.writeln('<body></body>')\n        if (known_results === test_results) {\n            document.body.style.backgroundColor = '#efe'\n            document.writeln('<h1 id=passfail>Passed!</h1>')\n        } else {\n            document.body.style.backgroundColor = '#fee'\n            document.writeln('<h1 id=passfail>Failed! 
:(</h1>')\n        }\n        document.writeln(\"<h2>Your Server's Responses</h2><pre>\" + test_results + '</pre>')\n        document.writeln(\"<h2>Known Server Responses</h2><pre>\" + known_results + '</pre>')\n    })\n}\nsetTimeout(evaluate_tests, 300)\n</script>\n"
  },
  {
    "path": "braid-http/test/readme.md",
    "content": "# To Test Braid-HTTP\n\nRun the server with:\n\n```\nnode server.js\n```\n\n### Test the server alone\n\nRun this at your command-line:\n\n```\n$ curl -v -H Subscribe:true http://localhost:9000/json\n```\n\nYou should see this:\n\n```\n*   Trying 127.0.0.1:9000...\n* Connected to localhost (127.0.0.1) port 9000 (#0)\n> GET /json HTTP/1.1\n> Host: localhost:9000\n> User-Agent: curl/7.79.1\n> Accept: */*\n> Subscribe:true\n> \n* Mark bundle as not supporting multiuse\n< HTTP/1.1 209 unknown\n< Range-Request-Allow-Methods: PATCH, PUT\n< Range-Request-Allow-Units: json\n< content-type: application/json\n< subscribe: true\n< cache-control: no-cache, no-transform\n< X-Accel-Buffering: no\n< Date: Wed, 29 May 2024 13:05:38 GMT\n< Connection: keep-alive\n< Keep-Alive: timeout=5\n< Transfer-Encoding: chunked\n< \nVersion: \"test\"\nParents: \"oldie\"\nContent-Length: 16\n\n{\"this\":\"stuff\"}\n\nVersion: \"test1\"\nParents: \"oldie\", \"goodie\"\nhash: 42\n:status: 115\nContent-Length: 1\nContent-Range: json [1]\n\n1\n\nVersion: \"test2\"\nContent-Length: 1\nContent-Range: json [2]\n\n2\n\nVersion: \"test3\"\nPatches: 2\n\nContent-Length: 1\nContent-Range: json [3]\nhash: 43\n\n3\n\nContent-Length: 1\nContent-Range: json [4]\n\n4\n\nVersion: \"another!\"\nContent-Length: 3\n\n\"!\"\n\n```\n...and the connection should stay open until you hit `C-c`.\n\n\n### Test the client against the server\n\nOpen a browser to:\n```\nhttp://localhost:9000/\n```\n\nThe page will run a series of GET+subscribe and PUT tests, and then turn green\nif they succeed, and red if they failed.\n\nIf you kill and restart the server, the browser should wait a second,\nreconnect and then display a **Reconnection Results** section that looks like\nthis:\n\n```\nRead 1 connection died\nRead 3 connection died\nRead 2 connection died\nRead 1 {\"version\":\"test\",\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 1 
{\"version\":\"test1\",\"parents\":[\"oldie\",\"goodie\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[1]\",\"content\":\"1\"}]}!\nRead 1 {\"version\":\"test2\",\"patches\":[{\"unit\":\"json\",\"range\":\"[2]\",\"content\":\"2\"}]}!\nRead 1 {\"version\":\"test3\",\"patches\":[{\"headers\":{\"content-length\":\"1\",\"content-range\":\"json [3]\"},\"unit\":\"json\",\"range\":\"[3]\",\"content\":\"3\"},{\"headers\":{\"content-length\":\"1\",\"content-range\":\"json [4]\"},\"unit\":\"json\",\"range\":\"[4]\",\"content\":\"4\"}]}!\nRead 3 {\"version\":\"test\",\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 2 {\"version\":\"test\",\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 2 {\"version\":\"test1\",\"parents\":[\"oldie\",\"goodie\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[1]\",\"content\":\"1\"}]}!\nRead 2 {\"version\":\"test2\",\"patches\":[{\"unit\":\"json\",\"range\":\"[2]\",\"content\":\"2\"}]}!\nRead 2 {\"version\":\"test3\",\"patches\":[{\"headers\":{\"content-length\":\"1\",\"content-range\":\"json [3]\"},\"unit\":\"json\",\"range\":\"[3]\",\"content\":\"3\"},{\"headers\":{\"content-length\":\"1\",\"content-range\":\"json [4]\"},\"unit\":\"json\",\"range\":\"[4]\",\"content\":\"4\"}]}!\nRead 1 {\"version\":\"another!\",\"body\":\"!\"}!\nRead 3 {\"version\":\"another!\",\"body\":\"!\"}!\nRead 2 {\"version\":\"another!\",\"body\":\"!\"}!\n```\n\n\n### Debugging Advice\n\nIf the client tests fail, plug them into https://glittle.org/diff to see\nwhat's wrong.\n\nYou can capture a request in unix with `nc -l 9000 > test-request.txt` to listen to\nport 9000 while your browser initiates a request, and then capture a response\nwith `nc localhost 9000 < test-request.txt` to read the request from disk and send\nit to a server running on port 9000.\n"
  },
  {
    "path": "braid-http/test/server.js",
    "content": "var braidify = require('../braid-http-server.js')\nvar sendfile = (f, req, res) => res.end(require('fs').readFileSync(require('path').join(__dirname, f)))\nrequire('http').createServer(\n    (req, res) => {\n\n        // Braidifies our server\n        braidify(req, res)\n\n        console.log('Request:', req.url, req.method,\n                    req.subscribe ? ('Subscribe: ' + req.subscribe)\n                    : 'no subscription')\n\n        // We'll serve Braid at the /json route!\n        if (req.url === '/json' && req.method === 'GET') {\n            res.setHeader('content-type', 'application/json')\n            // res.setHeader('accept-subscribe', 'true')\n\n            // If the client requested a subscription, let's honor it!\n            if (req.subscribe)\n                res.startSubscription()\n\n            // Send the current version\n            res.sendUpdate({\n                version: ['test'],\n                parents: ['oldie'],\n                body: JSON.stringify({this: 'stuff'})\n            })\n\n            if (req.subscribe) {\n                // Send a patch\n                res.sendUpdate({\n                    VersiOn: ['test1'],             // Upper/lowercase is ignored\n                    ParEnts: ['oldie', 'goodie'],\n                    patch: {unit: 'json', range: '[1]', content: '1'},\n                    hash: '42',\n                    ':status': '115'\n                })\n\n                // Send a patch as array\n                res.sendUpdate({\n                    Version: ['test2'],\n                    patch: {unit: 'json', range: '[2]', content: '2'}\n                })\n\n                // Send two patches as array\n                res.sendUpdate({\n                    version: ['test3'],\n                    patches: [{unit: 'json', range: '[3]', content: '3', hash: '43'},\n                              {unit: 'json', range: '[4]', content: '4'}]\n                })\n\n                // Simulate an 
update after the fact\n                setTimeout(() => res.sendUpdate({version: ['another!'], body: '\"!\"'}), 200)\n            }\n\n            // End the response, if this isn't a subscription\n            if (!req.subscribe) {\n                res.statusCode = 200\n                res.end()\n            }\n        }\n\n\n        // We'll accept Braid at the /json PUTs!\n        if (req.url === '/json' && req.method === 'PUT') {\n            req.parseUpdate().then(update => {\n                console.log('We got PUT', req.version, 'update', update)\n                res.statusCode = 200\n                res.end()\n            })\n        }\n\n        // Static HTML routes here:\n        else if (req.url === '/')\n            sendfile('client.html', req, res)\n        else if (req.url === '/braid-http-client.js')\n            sendfile('../braid-http-client.js', req, res)\n        else if (req.url === '/test-responses.txt')\n            sendfile('test-responses.txt', req, res)\n    }\n\n).listen(9000, () => console.log(\"Listening on http://localhost:9000...\"))\n"
  },
  {
    "path": "braid-http/test/test-request.txt",
    "content": "GET /json HTTP/1.1\r\nHost: localhost:9000\r\nConnection: keep-alive\r\nCache-Control: max-age=0\r\nsec-ch-ua: \"Not/A)Brand\";v=\"99\", \"Google Chrome\";v=\"115\", \"Chromium\";v=\"115\"\r\npeer: zxam86iouzp\r\nsec-ch-ua-mobile: ?0\r\nUser-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36\r\nsubscribe: true\r\nsec-ch-ua-platform: \"macOS\"\r\nAccept: */*\r\nSec-Fetch-Site: same-origin\r\nSec-Fetch-Mode: cors\r\nSec-Fetch-Dest: empty\r\nReferer: http://localhost:9000/\r\nAccept-Encoding: gzip, deflate, br\r\nAccept-Language: en-US,en;q=0.9\r\n\r\n"
  },
  {
    "path": "braid-http/test/test-responses.txt",
    "content": "Read 1 {\"version\":[\"test\"],\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 1 {\"version\":[\"test1\"],\"parents\":[\"oldie\",\"goodie\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[1]\",\"content\":\"1\"}],\"extra_headers\":{\"hash\":\"42\",\":status\":\"115\"}}!\nRead 1 {\"version\":[\"test2\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[2]\",\"content\":\"2\"}]}!\nRead 1 {\"version\":[\"test3\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[3]\",\"content\":\"3\",\"extra_headers\":{\"hash\":\"43\"}},{\"unit\":\"json\",\"range\":\"[4]\",\"content\":\"4\"}]}!\nRead 2 {\"version\":[\"test\"],\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nRead 2 {\"version\":[\"test1\"],\"parents\":[\"oldie\",\"goodie\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[1]\",\"content\":\"1\"}],\"extra_headers\":{\"hash\":\"42\",\":status\":\"115\"}}!\nRead 2 {\"version\":[\"test2\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[2]\",\"content\":\"2\"}]}!\nRead 2 {\"version\":[\"test3\"],\"patches\":[{\"unit\":\"json\",\"range\":\"[3]\",\"content\":\"3\",\"extra_headers\":{\"hash\":\"43\"}},{\"unit\":\"json\",\"range\":\"[4]\",\"content\":\"4\"}]}!\nRead 3 {\"version\":[\"test\"],\"parents\":[\"oldie\"],\"body\":\"{\\\"this\\\":\\\"stuff\\\"}\"}!\nWrite test 1 returned 200\nWrite test 2 returned 200\nWrite test 3 returned 200\nWrite test 4 returned 200\nRead 1 {\"version\":[\"another!\"],\"body\":\"\\\"!\\\"\"}!\nRead 2 {\"version\":[\"another!\"],\"body\":\"\\\"!\\\"\"}!\nRead 3 {\"version\":[\"another!\"],\"body\":\"\\\"!\\\"\"}!\n"
  },
  {
    "path": "json-patch/apply-patch.js",
    "content": "function apply_patch (obj, range, content) {\n\n    // Descend down a bunch of objects until we get to the final object\n    // The final object can be a slice\n    // Set the value in the final object\n\n    var path = range,\n        new_stuff = content\n\n    var path_segment = /^(\\.?([^\\.\\[]+))|(\\[((-?\\d+):)?(-?\\d+)\\])|\\[(\"(\\\\\"|[^\"])*\")\\]/\n    var curr_obj = obj,\n        last_obj = null\n\n    // Handle negative indices, like \"[-9]\" or \"[-0]\"\n    function de_neg (x) {\n        return x[0] === '-'\n            ? curr_obj.length - parseInt(x.substr(1), 10)\n            : parseInt(x, 10)\n    }\n\n    // Now iterate through each segment of the range e.g. [3].a.b[3][9]\n    while (true) {\n        var match = path_segment.exec(path),\n            subpath = match ? match[0] : '',\n            field = match && match[2],\n            slice_start = match && match[5],\n            slice_end = match && match[6],\n            quoted_field = match && match[7]\n\n        // The field could be expressed as [\"nnn\"] instead of .nnn\n        if (quoted_field) field = JSON.parse(quoted_field)\n\n        slice_start = slice_start && de_neg(slice_start)\n        slice_end = slice_end && de_neg(slice_end)\n\n        // console.log('Descending', {curr_obj, path, subpath, field, slice_start, slice_end, last_obj})\n\n        // If it's the final item, set it\n        if (path.length === subpath.length) {\n            if (!subpath) return new_stuff\n            else if (field) {                           // Object\n                if (new_stuff === undefined)\n                    delete curr_obj[field]              // - Delete a field in object\n                else\n                    curr_obj[field] = new_stuff         // - Set a field in object\n            } else if (typeof curr_obj === 'string') {  // String\n                console.assert(typeof new_stuff === 'string')\n                if (!slice_start) {slice_start = slice_end; slice_end 
= slice_end+1}\n                if (last_obj) {\n                    var s = last_obj[last_field]\n                    last_obj[last_field] = (s.slice(0, slice_start)\n                                            + new_stuff\n                                            + s.slice(slice_end))\n                } else\n                    return obj.slice(0, slice_start) + new_stuff + obj.slice(slice_end)\n            } else                                     // Array\n                if (slice_start)                       //  - Array splice\n                    [].splice.apply(curr_obj, [slice_start, slice_end-slice_start]\n                                    .concat(new_stuff))\n            else {                                     //  - Array set\n                console.assert(slice_end >= 0, 'Index '+subpath+' is too small')\n                console.assert(slice_end <= curr_obj.length - 1,\n                               'Index '+subpath+' is too big')\n                curr_obj[slice_end] = new_stuff\n            }\n\n            return obj\n        }\n\n        // Otherwise, descend down the path\n        console.assert(!slice_start, 'No splices allowed in middle of path')\n        last_obj = curr_obj\n        last_field = field || slice_end\n        curr_obj = curr_obj[last_field]\n        path = path.substr(subpath.length)\n    }\n}\n\nmodule.exports = apply_patch\n"
  },
  {
    "path": "json-patch/package.json",
    "content": "{\n  \"name\": \"@braid.org/json-patch\",\n  \"version\": \"1.0.6\",\n  \"description\": \"Patch JSON\",\n  \"main\": \"apply-patch.js\",\n  \"scripts\": {\n    \"test\": \"node test.js\"\n  },\n  \"author\": \"\",\n  \"license\": \"ISC\"\n}\n"
  },
  {
    "path": "json-patch/readme.md",
    "content": "# JSON Patch\n\nThis library patches JSON objects using the Braid range-patch format.\n\nUsing it:\n```javascript\nvar patch = require('@braid.org/json-patch')\nvar json = {a: \"foo\", b: [1,2,3]}\n\n// Replace the 2 with \"a new string\"\npatch(json, '.b[3]', \"a new string\")\n\nconsole.log(json)   // {a: \"foo\", b: [1, \"a new string\", 3]}\n\n// Edit that string\npatch(json, '.b[3][1:5]', 'n old')\n\nconsole.log(json)   // {a: \"foo\", b: [1, \"an old string\", 3]}\n```\n\nThis library mutates your JSON objects in-place.  If you want a copy, then\nclone your object first.\n"
  },
  {
    "path": "json-patch/test.js",
    "content": "var assert = require('assert')\n\nvar patch = require('.')\nvar json = {a: \"foo\", b: [1,2,3]}\n\n// Replace 2 with 99\npatch(json, '.b[1]', 99)\n\nconsole.log(json)\nassert.deepEqual(json, {a: \"foo\", b: [1, 99, 3]})\n\n// Insert a string\npatch(json, '.b[1:1]', \"a new thing\")\n\nconsole.log(json)\nassert.deepEqual(json, {a: \"foo\", b: [1, \"a new thing\", 99, 3]})\n\n// Splice that string\npatch(json, '.b[1][1:5]', \"n old\")\n\nconsole.log(json)\nassert.deepEqual(json, {a: \"foo\", b: [1, \"an old thing\", 99, 3]})\n\n// Test case: Delete a field in an object\npatch(json, \".a\", undefined)\nconsole.log(json)\nassert.deepEqual(json, { b: [1, \"an old thing\", 99, 3] })\n\n// Test case: Set a field in a nested object\njson = { a: { c: \"bar\" }, b: [1, 2, 3] }\npatch(json, \".a.c\", \"baz\")\nconsole.log(json)\nassert.deepEqual(json, { a: { c: \"baz\" }, b: [1, 2, 3] })\n\n// Test case: Splice an array with negative index\npatch(json, \".b[-1:-0]\", [4, 5])\nconsole.log(json)\nassert.deepEqual(json, { a: { c: \"baz\" }, b: [1, 2, 4, 5] })\n\n// Test case: append stuff to the array\npatch(json, \".b[-0:-0]\", [9, 8])\nconsole.log(json)\nassert.deepEqual(json, { a: { c: \"baz\" }, b: [1, 2, 4, 5, 9, 8] })\n\n// Test case: Set a value in a deeply nested object\njson = { a: { c: { d: { e: \"foo\" } } }, b: [1, 2, 3] }\npatch(json, \".a.c.d.e\", \"bar\")\nconsole.log(json)\nassert.deepEqual(json, { a: { c: { d: { e: \"bar\" } } }, b: [1, 2, 3] })\n\nconsole.log(\"All tests passed!\")\n"
  },
  {
    "path": "kernel/antimatter.js",
    "content": "module.exports = require.antimatter = (node) => ({\n\n    set (args) {\n        var {key, patches, version, parents, origin} = args\n        var resource = node.resource_at(key)\n        if (args.is_new) {\n            // Next, we want to remember some information for the purposes of\n            // acknowledgments, namely, we'll remember how many people we\n            // forward this version along to (we'll actually do the forwarding\n            // right after this), and we also remember whether or not we are\n            // the originators of this version (if we originated the version,\n            // then we'll be responsible for sending the \"global\" ack when the\n            // time is right)..\n\n            var origin_is_keepalive = origin && resource.keepalive_peers[origin.id]\n            resource.acks_in_process[version] = {\n                origin: origin_is_keepalive && origin,\n                count: Object.keys(resource.keepalive_peers).length\n            }\n            if (origin_is_keepalive)\n                // If the origin is a keepalive_peer, then since we've already\n                // seen it from them, we can decrement count\n                resource.acks_in_process[version].count--\n\n            assert(resource.acks_in_process[version].count >= 0,\n                   node.pid, 'Acks have below zero! Proof:',\n                   {origin, key, version,\n                    acks_in_process: resource.acks_in_process[version]})\n        }\n        else if (resource.acks_in_process[version])\n            // Q: In what situation is acks_in_process[version] false?\n            //\n            // A: Good question; the answer is that in some cases we will\n            // delete acks_in_process for a version if, say, we receive a\n            // global ack for a descendant of this version, or if we\n            // receive a fissure.. 
in such cases, we simply ignore the\n            // ack process for that version, and rely on a descendant\n            // version getting globally acknowledged.\n\n            // Now if we're not going to add the version, most commonly\n            // because we already possess the version, there is another\n            // situation that can arise, namely, someone that we forwarded the\n            // version to sends it back to us... How could that happen? Well,\n            // they may have heard about this version from someone we sent it\n            // to, before hearing about it from us (assuming some pretty gross\n            // latency)..  anyway, if it happens, we can treat it like an ACK\n            // for the version, which is why we decrement \"count\" for\n            // acks_in_process for this version; a similar line of code exists\n            // inside \"node.ack\"\n\n            resource.acks_in_process[version].count--\n\n        // Since we may have messed with the ack count, we check it to see if\n        // it has gone to 0, and if it has, take the appropriate action (which\n        // is probably to send a global ack)\n\n        check_ack_count(node, key, resource, version)\n    },\n\n    ack (args) {\n        var {key, valid, seen, version, origin} = args\n        var resource = node.resource_at(key)\n        if (seen === 'local') {\n            if (resource.acks_in_process[version]) {\n                log('node.ack: Got a local ack! 
Decrement count to',\n                    resource.acks_in_process[version].count - 1)\n                resource.acks_in_process[version].count--\n                check_ack_count(node, key, resource, version)\n            }\n        } else if (seen === 'global') {\n            if (!resource.time_dag[version]) return\n            \n            var ancs = resource.ancestors(resource.unack_boundary)\n            if (ancs[version]) return\n            \n            ancs = resource.ancestors(resource.acked_boundary)\n            if (ancs[version]) return\n            \n            add_full_ack_leaf(node, resource, version)\n            node.bindings(key).forEach(pipe => {\n                if (pipe.send && (pipe.id !== origin.id))\n                    pipe.send({method: 'ack', key, version, seen: 'global'})\n            })\n        }\n    },\n\n    fissure ({key, fissure, origin}) {\n        var resource = node.resource_at(key)\n        var fkey = fissure.a + ':' + fissure.b + ':' + fissure.conn\n        if (!resource.fissures[fkey]) {\n            resource.fissures[fkey] = fissure\n            \n            resource.acks_in_process = {}\n            \n            // First forward this fissure along\n            node.bindings(key).forEach(pipe => {\n                if (pipe.send && (!origin || (pipe.id !== origin.id)))\n                    pipe.send({\n                        method: 'fissure',\n                        key,\n                        fissure\n                    })\n            })\n            \n            // And if this fissure matches us, then send the anti-fissure for\n            // it\n            if (fissure.b == node.pid)\n                node.fissure({\n                    key,\n                    fissure: {\n                        a:        node.pid,\n                        b:        fissure.a,\n                        conn:     fissure.conn,\n                        versions: fissure.versions,\n                        parents:  {},\n          
              time:     fissure.time\n                    }\n                })\n        }\n    },\n\n    disconnected ({key, name, versions, parents, time, origin}) {\n        // Todo:\n        //  - rename \"name\" to \"fissure\".\n        //  - rename \"time\" to \"disconnect_time\"\n\n        // if we haven't sent them a welcome (or they are not remote), then no\n        // need to create a fissure\n        if (!origin.remote_peer|| !node.resource_at(key).keepalive_peers[origin.id]) return\n        \n        // now since we're disconnecting, we reset the keepalive_peers flag\n        delete node.resource_at(key).keepalive_peers[origin.id]\n\n        assert(key && origin)\n        // To do:\n        //  - make this work for read-only connections\n        //  - make this work for multiple keys (a disconnection should\n        //    affect all of its keys)\n        var resource = node.resource_at(key),\n            fissure\n\n        assert(!(name || versions || parents), 'Surprise!')\n\n        // Generate the fissure\n        if (name) {\n            // Create fissure from name\n            var [a, b, conn] = name.split(/:/)\n            fissure = {\n                a, b, conn,\n                versions,\n                parents,\n                time\n            }\n        } else {\n            // Create fissure from scratch\n\n            // assert(resource.subscriptions[origin.id],\n            //        `This pipe ${origin.id} is not on the resource for ${node.pid}'s ${key}`,\n            //        resource.subscriptions)\n            \n            assert(origin.id,          'Need id on the origin', origin)\n            assert(origin.remote_peer, 'Need a peer on origin', origin)\n\n            var versions = {}\n            var ack_versions = resource.ancestors(resource.acked_boundary)\n            Object.keys(resource.time_dag).forEach(v => {\n                if (!ack_versions[v] || resource.acked_boundary[v])\n                    versions[v] = true\n      
      })\n            \n            // Now collect the parents.  We start with all fissures...\n            var parents = {...resource.fissures}\n            // ... and then filter down to just be the leaves of the fissure DAG\n            Object.values(resource.fissures).forEach(f => {\n                Object.keys(f.parents).forEach(p => delete parents[p])\n            })\n            Object.keys(parents).forEach(p => parents[p] = true)\n\n            fissure = {\n                a: node.pid,\n                b: origin.remote_peer,\n                conn: origin.connection,\n                versions,\n                parents,\n                time\n            }\n\n        }\n\n        node.fissure({key, origin, fissure})\n    },\n\n    welcome (args) {\n        var {key, versions, fissures, unack_boundary, min_leaves, parents,\n             origin, versions_to_add, added_versions} = args\n\n        var resource = node.resource_at(key)\n\n        // Next we process the incoming fissures, and like before, we only\n        // want to add new ones, and there's also this gen_fissures variable\n        // which is short for \"generated_fissures\", and records fissures which\n        // we created just now as part of a special case where we receive a\n        // fissure that we were supposedly involved with, but we don't have a\n        // fissure record for (this can happen when someone tries to connect\n        // with us, but the connection is broken even before we knew they were\n        // trying to connect)\n\n        var new_fissures = []\n        var gen_fissures = []\n        fissures.forEach(f => {\n            var key = f.a + ':' + f.b + ':' + f.conn\n            if (!resource.fissures[key]) {\n\n                // So we don't have this fissure.. 
let's add it..\n\n                new_fissures.push(f)\n                resource.fissures[key] = f\n\n                // Now let's check for that special case where we don't have\n                // the fissure, but we're one of the ends of the fissure (note\n                // that we don't check for f.a == node.pid because that would\n                // be a fissure created by us -- we're looking for fissures\n                // not created by us, but that we are the other end of).  We\n                // just add these fissures to gen_fissures for now, and later\n                // in this function we'll iterate over gen_fissures and\n                // actually add these fissures to our data structure (as well\n                // as tell them to our peers)\n                //\n                // If we don't do this, then this fissure will never get pruned,\n                // because it will never find its \"other half\"\n\n                if (f.b == node.pid) gen_fissures.push({\n                    a:        node.pid,\n                    b:        f.a,\n                    conn:     f.conn,\n                    versions: f.versions,\n                    parents:  {},\n                    time:     f.time\n                })\n            }\n        })\n\n        // There is this thing called the unack_boundary, which defines a set\n        // of nodes (namely everything on the boundary, and any ancestors of\n        // anything on the boundary), and these nodes should exhibit the\n        // behavior that even if a global acknowledgment is received for them,\n        // it should be ignored.\n        //\n        // Why should we ignore them? 
well, this welcome message we've received\n        // is kindof like an anti-fissure -- it is a new citizen in the network,\n        // and the whole idea of a \"global ack\" is that all citizens connected\n        // directly or transitively to ourselves have seen this version,\n        // but imagine that there is a \"global ack\" sitting in our message queue,\n        // but it was created before this new connection, meaning that its\n        // claim has been violated (in particular, this new citizen may not\n        // have seen the version, and this new citizen may bring in transitive\n        // access to even more citizens, which also may not have seen the version),\n        // so rather than trying to figure out who has seen what when a new\n        // connection is established, we sortof blacklist global acknowledgments\n        // for all versions in both our, and the new citizen's current versions,\n        // and we wait for a version created after this connection event\n        // to get globally acknowledged (note that this involves un-globally\n        // acknowledging things that we had thought were globally acknowledged,\n        // but not everything -- if a version is globally acknowledged by us,\n        // and also by the incoming citizen, then we keep that version as\n        // globally acknowledged)\n\n        // This next if statement deals with two cases of the welcome message.\n        // In one case, the welcome is sent as a response to a get, in which\n        // case unack_boundary is null (and you can see that we just set it to\n        // be absolutely all of the versions we currently know about, both in\n        // our own version set, and the incoming version set, since we already\n        // added the incoming versions to our set). 
If it isn't null, then we\n        // don't need to give it a value here (and this message must be a case\n        // of propagating a welcome around the network)\n        //\n        // So conceptually, we establish the unack_boundary on the initial\n        // welcome (and we can't know it before then, because the person\n        // sending us this welcome doesn't know which versions we have), and\n        // then once it is established, we hardcode the result into the\n        // welcome messages that we send to our peers\n\n        if (!unack_boundary)\n            unack_boundary = Object.assign({}, resource.current_version)\n\n        // To understand this next bit of code, first know that these\n        // \"boundary\" variables are really just trying to be more efficient\n        // ways of storing sets of versions (which include everything on the\n        // boundary, as well as all the ancestors of those versions). If we\n        // were using sets, our code would be doing this:\n        //\n        // resource.unack_set = union(resource.unack_set, unack_set)\n        //\n        // That is, we want to union our pre-existing unacked stuff with\n        // the new incoming unacked stuff. 
But since our implementation\n        // uses boundaries rather than sets, we get the code that follows\n        // (you can see that the only modifications being made are to\n        // resource.unack_boundary, where we delete some stuff, and add\n        // some stuff, so that it represents the new boundary)\n\n        var our_conn_versions = resource.ancestors(resource.unack_boundary)\n        var new_conn_versions = resource.ancestors(unack_boundary)\n\n        Object.keys(resource.unack_boundary).forEach(x => {\n            if (new_conn_versions[x] && !unack_boundary[x])\n                delete resource.unack_boundary[x]\n        })\n        Object.keys(unack_boundary).forEach(x => {\n            if (!our_conn_versions[x]) resource.unack_boundary[x] = true\n        })\n\n        // So that was dealing with the unack_boundary stuff... now we want to\n        // deal with the globally acknowledged stuff. Basically, anything that\n        // is globally acknowledged by both us, and the incoming citizen, will\n        // remain globally acknowledged. We'll compute these versions as the\n        // intersection of ours and their acknowledged set, and then store\n        // just the boundary of the intersection set and call it \"min_leaves\"\n        // (where \"min\" basically means \"intersection\" in this case, and used\n        // to be paired with \"max_leaves\", which meant \"union\", and was used\n        // to represent the unack_boundary above)\n        //\n        // As before, min_leaves will be null on the initial welcome,\n        // and we'll compute it, and then subsequent welcomes will have this\n        // result included...\n        \n        if (!min_leaves) {\n            if (versions.length === 0 && (!parents || Object.keys(parents).length === 0))\n                min_leaves = {...resource.current_version}\n            else {\n                min_leaves = parents ? 
{...parents} : {}\n                versions.forEach(v => {\n                    if (!versions_to_add[v.version]) min_leaves[v.version] = true\n                })\n                min_leaves = resource.get_leaves(resource.ancestors(min_leaves, true))\n            }\n        }\n\n        // We are now armed with this \"min_leaves\" variable,\n        // either because we computed it, or it was given to us...\n        // what do we do with it? well, we want to roll-back our\n        // boundary of globally acknowledged stuff so that it only\n        // includes stuff within \"min_leaves\" (that is, we only want\n        // to keep stuff as globally acknowledged if it was already\n        // globally acknowledged, and also it is already known to this\n        // incoming citizen)\n        //\n        // As before, we're really doing a set intersection (in this case\n        // an intersection between min_leaves and our own acked_boundary),\n        // but the code looks wonkier because all our variables store\n        // the boundaries of sets, rather than the sets themselves\n\n        var min_versions = resource.ancestors(min_leaves)\n        var ack_versions = resource.ancestors(resource.acked_boundary)\n        Object.keys(resource.acked_boundary).forEach(x => {\n            if (!min_versions[x])\n                delete resource.acked_boundary[x]\n        })\n        Object.keys(min_leaves).forEach(x => {\n            if (ack_versions[x]) resource.acked_boundary[x] = true\n        })\n\n        // This next line of code is pretty drastic.. 
it says: \"If we're\n        // connecting to someone new, then all our hard work keeping track of\n        // acknowledgments is now useless, since it relies on an algorithm\n        // that assumes there will be no patches in the network topology\n        // whilst the algorithm is being carried out -- and the network\n        // topology just changed, because now there's this new guy\"\n        //\n        // Fortunately, once a new version is globally acknowledged within the\n        // new topology, it's acknowledgment will extend to these versions as\n        // well, because global acknowledgments apply to all ancestors of a\n        // version, and any new versions will include all existing versions as\n        // ancestors.\n        \n        resource.acks_in_process = {}\n\n        // Ok, we're pretty much done. We've made all the patches to our own\n        // data structure (except for the gen_fissures, which will happen\n        // next), and now we're ready to propogate the information to our\n        // peers.\n\n        assert(unack_boundary && min_leaves && fissures && added_versions)\n\n        // In the above, when we added new versions and fissures to ourselves,\n        // we marked each such instance in added_versions or new_fissures, and\n        // if we got any new versions or fissures, then we want to tell our\n        // peers about it (if we didn't, then we don't need to tell anyone,\n        // since there's nothing new to hear about)\n\n        if ((added_versions.length > 0\n             || new_fissures.length > 0\n             || !resource.weve_been_welcomed)) {\n\n            // Now record that we've seen a welcome\n            resource.weve_been_welcomed = true\n\n            // And tell everyone about it!\n            node.bindings(key).forEach(pipe => {\n                if (pipe.send && (pipe.id !== origin.id))\n                    pipe.send({method: 'welcome',\n                               key, versions: added_versions, 
unack_boundary,\n                               min_leaves, fissures: new_fissures})\n            })\n        }\n\n\n        // now we finally add the fissures we decided we need to create in\n        // gen_fissures... we add them after forwarding the welcome so that\n        // these network messages appear after the welcome (since they may\n        // rely on information which is in the welcome for other people to\n        // understand them)\n        gen_fissures.forEach(f => node.fissure({key, fissure:f}))\n    }\n})\n\n\nfunction add_full_ack_leaf(node, resource, version) {\n\n    // G: someone is telling us that \"version\" is fully (globally) acknowledged,\n    // and this fact implies that every ancestor of version is also fully\n    // acknowledged, which means that we don't need to keep certain information\n    // about them, like \"acks_in_process\".. this next section simply\n    // iterates over all the ancestors (including this version itself) and deletes\n    // information we don't need anymore for each one..\n\n    var marks = {}\n    function f(v) {\n        if (!marks[v]) {\n            marks[v] = true\n            delete resource.unack_boundary[v]\n            delete resource.acked_boundary[v]\n            delete resource.acks_in_process[v]\n            Object.keys(resource.time_dag[v]).forEach(f)\n        }\n    }\n    f(version)\n\n    // G: now that old information is gone, we need to add one bit of new\n    // information, namely that this version is fully acknowledged,\n    // which we express by putting it in the \"acked_boundary\" (and we hope\n    // that nobody calls this function on a version which is already fully\n    // acknowledged; you can check the two places where this function is called\n    // to verify that they guard against calling this function on a version\n    // which is already fully acknowledged.. 
note that one does so by noting\n    // that \"acks_in_process\" will always be null for versions which are fully\n    // acknowledged, because \"acks_in_process\" is deleted in section above\n    // for all such versions)\n\n    resource.acked_boundary[version] = true\n\n    // G: next we're going to prune.. really we could call prune whenever we want,\n    // this is just a somewhat reasonable time, since there is some chance\n    // that with this new full acknowledgment, that we might be able to prune\n    // more stuff than we could prune before (but we could also let the user\n    // call \"prune\" explicitly at their leisure)\n\n    start_prune(node, resource)\n}\nfunction check_ack_count(node, key, resource, version) {\n    // TODO: could this only take key, instead of key and resource?  Or\n    // perhaps a resource should know its key?\n    assert(!resource.acks_in_process[version]\n           || resource.acks_in_process[version].count >= 0,\n           'Acks have gone below zero!',\n           {key, version,\n            acks_in_process: resource.acks_in_process[version]})\n\n    // G: this function gets called from a couple of places, basically whenever\n    // someone suspects that the \"count\" within \"acks_in_process\" may have changed,\n    // since it might have gone all the way to zero, in which case we will act...\n    // of course, in some such instances, acks_in_process may have been removed\n    // entirely for a version, so we guard against that here, too..\n\n    if (resource.acks_in_process[version]\n        && resource.acks_in_process[version].count == 0) {\n\n        // G: sweet, the count has gone to zero, that means all the acks we were\n        // waiting for have arrived, now there are a couple possibilities..\n\n        if (resource.acks_in_process[version].origin) {\n\n            // G: in this case, we have an \"origin\", which means we didn't create\n            // this version ourselves, and \"origin\" tells us who we first 
heard\n            // about it from, and so now, as per the ack-algorithm, we're going\n            // to send an ack back to that person (because the algorithm tells us\n            // to only send an ack after we have received acks from everyone\n            // we forwarded the information to)\n\n            let p = resource.acks_in_process[version].origin\n            p.send && p.send({\n                method: 'ack', key, seen:'local', version\n            })\n        } else {\n\n            // G: in this case, we have no \"origin\", which means we created\n            // this version ourselves, and now the fact that all our peers\n            // have acknowledged it means that all of their peers have also\n            // acknowledged. In fact, everyone in the network must have\n            // acknowledged it (or else we would have received a fissure\n            // before receiving this acknowledgment, and that fissure would\n            // have wiped away \"acks_in_process\" for this version), so that\n            // means this version is \"fully (globally) acknowledged\",\n            // so we'll call add_full_ack_leaf for this version..\n\n            add_full_ack_leaf(node, resource, version)\n\n            // G: but \"add_full_ack_leaf\" just modifies our own datastructure,\n            // and we must also give the good news to everyone else, so\n            // we send a \"global\" ack to all our peers (and they'll forward it\n            // to their peers)\n\n            node.bindings(key).forEach( pipe => {\n                pipe.send && pipe.send({method: 'ack', key, seen:'global', version})\n            })\n        }\n    }\n}\n\nfunction start_prune (node, resource) {\n    var unremovable = {}\n\n    if (!resource.fissures)\n        console.error('Bad resource', resource)\n\n    // First, let's prune old fissures\n\n    // Calculate which fissures we have to keep due to parenting\n    // rule... 
which we will be removing soon.\n    Object.entries(resource.fissures).forEach(x => {\n        if (!resource.fissures[x[1].b + ':' + x[1].a + ':' + x[1].conn]) {\n            function f(y) {\n                if (!unremovable[y.a + ':' + y.b + ':' + y.conn]) {\n                    unremovable[y.a + ':' + y.b + ':' + y.conn] = true\n                    unremovable[y.b + ':' + y.a + ':' + y.conn] = true\n                    Object.keys(y.parents).forEach(p => {\n                        if (resource.fissures[p]) f(resource.fissures[p])\n                    })\n                }\n            }\n            f(x[1])\n        }\n    })\n    \n    // Now remove the fissures\n    Object.entries(resource.fissures).forEach(x => {\n        var other_key = x[1].b + ':' + x[1].a + ':' + x[1].conn\n        var other = resource.fissures[other_key]\n        if (other) {\n            if (unremovable[x[0]]) {\n                resource.fissures[x[0]].versions = {}\n                resource.fissures[other_key].versions = {}\n            } else {\n                delete resource.fissures[x[0]]\n                delete resource.fissures[other_key]\n            }\n        }\n    })\n\n    // Remove fissures that have expired due to time\n    if (node.fissure_lifetime != null) {\n        var now = Date.now()\n        Object.entries(resource.fissures).forEach(([k, f]) => {\n            if (f.time == null) f.time = now\n            if (f.time <= now - node.fissure_lifetime) {\n                delete resource.fissures[k]\n            }\n        })\n    }\n\n    // Remove fissures that are beyond our max_fissures limit\n    if (node.max_fissures != null) {\n        let count = Object.keys(resource.fissures).length\n        if (count > node.max_fissures) {\n            Object.entries(resource.fissures).sort((a, b) => {\n                if (a[1].time == null) a[1].time = now\n                if (b[1].time == null) b[1].time = now\n                return a[1].time - b[1].time\n            
}).slice(0, count - node.max_fissures).forEach(e => {\n                delete resource.fissures[e[0]]\n            })\n        }\n    }\n\n    // Now figure out which versions we want to keep,\n    var keep_us = {}\n\n    // incluing versions in fissures..\n    Object.values(resource.fissures).forEach(f => {\n        Object.keys(f.versions).forEach(v => keep_us[v] = true)\n    })\n\n    // and versions which are not fully acknowledged, or on the boundary\n    var acked = resource.ancestors(resource.acked_boundary)\n    Object.keys(resource.time_dag).forEach(x => {\n        if (!acked[x] || resource.acked_boundary[x]) keep_us[x] = true\n    })\n\n    // ok, now we want to find \"bubbles\" in the dag,\n    // with a \"bottom\" and \"top\" version,\n    // where any path down from the top will hit the bottom,\n    // and any path up from the bottom will hit the top,\n    // and also, the bubble should not contain any versions we want to keep\n    // (unless it's the bottom)\n\n    // to help us calculate bubbles,\n    // let's calculate children for our time dag\n    // (whereas the time dag just gives us parents)\n    var children = {}\n    Object.entries(resource.time_dag).forEach(([v, parents]) => {\n        Object.keys(parents).forEach(parent => {\n            if (!children[parent]) children[parent] = {}\n            children[parent][v] = true\n        })\n    })\n\n    // now we'll actually compute the bubbles\n    var to_bubble = {}\n    var bubble_tops = {}\n    var bubble_bottoms = {}\n    \n    function mark_bubble(bottom, top, tag) {\n        if (!to_bubble[bottom]) {\n            to_bubble[bottom] = tag\n            if (bottom !== top)\n                Object.keys(resource.time_dag[bottom]).forEach(\n                    p => mark_bubble(p, top, tag)\n                )\n        }\n    }\n    \n    // This begins the O(n^2) operation that we wanna shrink to O(n)\n    var done = {}\n    function f(cur) {\n        if (!resource.time_dag[cur]) return\n        if 
(done[cur]) return\n        done[cur] = true\n        \n        if (!to_bubble[cur] || bubble_tops[cur]) {\n            var bubble_top = find_one_bubble(cur)\n            if (bubble_top) {\n                delete to_bubble[cur]\n                mark_bubble(cur, bubble_top, bubble_tops[cur] || cur)\n                bubble_tops[bubble_top] = bubble_tops[cur] || cur\n                bubble_bottoms[bubble_tops[cur] || cur] = bubble_top\n            }\n        }\n        \n        Object.keys(resource.time_dag[cur]).forEach(f)\n    }\n    Object.keys(resource.current_version).forEach(f)\n    // This is the end of an O(n^2) algorithm\n\n    to_bubble = Object.fromEntries(Object.entries(to_bubble).map(\n        ([v, bub]) => [v, [bub, bubble_bottoms[bub]]]\n    ))\n    \n    function find_one_bubble(cur) {\n        var seen = {[cur]: true}\n        var q = Object.keys(resource.time_dag[cur])\n        var expecting = Object.fromEntries(q.map(x => [x, true]))\n        while (q.length) {\n            cur = q.pop()\n            if (!resource.time_dag[cur]) return null\n            if (keep_us[cur]) return null\n            if (Object.keys(children[cur]).every(c => seen[c])) {\n                seen[cur] = true\n                delete expecting[cur]\n                if (!Object.keys(expecting).length) return cur\n                \n                Object.keys(resource.time_dag[cur]).forEach(p => {\n                    q.push(p)\n                    expecting[p] = true\n                })\n            }\n        }\n        return null\n    }\n\n    // now hand these bubbles to the mergeable's prune function..\n    if (resource.mergeable.prune)\n        resource.mergeable.prune(to_bubble)\n}"
  },
  {
    "path": "kernel/demos/simple/simple-client.html",
    "content": "<script src=\"../../../builds/braid-bundle.js\"></script>\n<body>\n  <div id=\"out\"></div>\n  <textarea id=\"in\"></textarea>\n  <h2>How to use</h2>\n  <p>Run <code>nodemon simple-server network</code> on command-line\n  <p>And type in the box above\n  <p>You can watch the network messages scroll by in your terminal\n</body>\n\n<script>; // Semicolon helps Mike's editor indent properly\n\n// Create a node\nvar id = 'C-' + Math.random().toString(36).substr(10)\nnode = require('../../node.js')({pid: id})\nrequire('../../websocket-client.js')({node})\n\n// Configure the node\nnode.get('/foo', (x) => {\n    document.getElementById('out').innerText = x\n    if (document.getElementById('in').value !== x)\n        document.getElementById('in').value = x\n})\ndocument.getElementById('in').addEventListener('input',\n                                               (e) => node.set('/foo', e.target.value))\n\nprint_network = true\n// show_debug = true\n</script>"
  },
  {
    "path": "kernel/demos/simple/simple-server.js",
    "content": "var certificate = `-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJANoWGfl3pEeHMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\naWRnaXRzIFB0eSBMdGQwHhcNMTkwODE2MjAxNTIxWhcNMjAwODE1MjAxNTIxWjBF\nMQswCQYDVQQGEwJVUzETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\nZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\nCgKCAQEA1bilKJKH1axV0OLLIwg3WxXx6MMsFL3/bv2uX9+Z22uZukJsgqnR2y+6\nOCLH8opczH4Now3Od+P0G4kNSn9m+T5W5bvf9bIIDmCG/04uGCvx0L8bgYA5lyMJ\naFdcfCXu1iKvUt1LdZlds2AsBfceYCB6FwsMkUODzZ7OJ6R1aXUHxQ74me/ksoxV\nP7Fmv012gRJkYn5gzvrokula2Yxb+z84TP115tALYBBpLhj5WPOXSmyVo0Lf1dGQ\nJfbRxvx32pxZiBPwcNre3yzKhRue99tRuPHFCQBZSkXGuT7K9bsNnPwXfAmB2VbQ\nbjezmqVGv8KnwyTRWdLaEcV9cxHCnQIDAQABo1AwTjAdBgNVHQ4EFgQUOoDGcBG8\nXm/Jj+WbIYctxhGqD6owHwYDVR0jBBgwFoAUOoDGcBG8Xm/Jj+WbIYctxhGqD6ow\nDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAaHjdu8Hg34Zzay4djFSo\nhRno4m+tiJ4UT3oLTHRGh54JFKQPeLLEY0WbhrBDyuDJrCdyjvmqpuELPPwNRdo0\nLy3fhRIxeaN8px6V0bpdj0ePDqC0ZU5It/9jVlC0OkdG2xwJygw+xNLaHb09l7rj\nZLM+tOKQEBxZCLKqc1FLlS9MIxDKaVdI2JSBDmNl+0XyFwKM6bfI3Mk8STuZXm5A\nEtWvDNbLFl6TLyKDeHNRc0LQEa74xE3yhoWO3kb9phL4A1g/I7rW+B2we4N84FfT\nv5C5/zn58xabUtMVeGUi/avnVz+C4HY4ZMEIQPIodtsRcZq05RQGW8ipig7QaXnD\ngQ==\n-----END CERTIFICATE-----\n`, private_key = `-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDVuKUokofVrFXQ\n4ssjCDdbFfHowywUvf9u/a5f35nba5m6QmyCqdHbL7o4IsfyilzMfg2jDc534/Qb\niQ1Kf2b5Plblu9/1sggOYIb/Ti4YK/HQvxuBgDmXIwloV1x8Je7WIq9S3Ut1mV2z\nYCwF9x5gIHoXCwyRQ4PNns4npHVpdQfFDviZ7+SyjFU/sWa/TXaBEmRifmDO+uiS\n6VrZjFv7PzhM/XXm0AtgEGkuGPlY85dKbJWjQt/V0ZAl9tHG/HfanFmIE/Bw2t7f\nLMqFG57321G48cUJAFlKRca5Psr1uw2c/Bd8CYHZVtBuN7OapUa/wqfDJNFZ0toR\nxX1zEcKdAgMBAAECggEAWCxLh0ec3tywsvM+V3+mRt/w49TRtOUGIyZp8IfxlAL6\nc0vANNAXElTIgSxoTXoj+wHuYlzp17CmH04Vu6yAMUg01acDKPyAMl5Ek8QPZE2N\nAFA36t+Z4u7DjNauA1IrDRFWP9uorCXP8Jc20mc3kvUTKbqXPr8Z+5UO/G/vOMgc\nQKXPoz45EbFahTwck4TQowLeKhAF3BU5fn48zuBy055q6babV1z0LDzDIUGcZqHv\n4VPMLOUp1KzpwoQd6o3wwBBttJkFqBf7US3nExdq0SkHgwE/lOKgJuSMmgAWgGm5\n3iO8F+Ve84206IgmhQOMw3KZjIgWdiCW/dgVbJQsQQKBgQD4/2Wr4NYfdXqotnjT\nMZCx5921nFwkMyt7JndCIs49CQ3lMGtlijRtVHGhZKVHUZr4SKKfjbOAiABPCsRL\nZhvVnhlbmUioSgfMM/Y+fkCs3DdzuJE9tVuSdyQFoblY5W1dLeuLTEI3TDTos+V7\njfKsHMqF0gAbCkt7GgVpy5vCRQKBgQDbu0ibskjwF5voMuJmJdvIv0XAi91sRTRJ\nRuDrH6NPU+RrVHTHRJMtGRM5zWI4b7N0KTx+J2xaJ6J/FxbfsdThgKb99gB9j3hR\nF0CK/quMjAwpezWwatHarK87c//rvmIBVL82xLe3sQKxmwdCUiyhum/4l+GN+WpZ\nlfP4HU4weQKBgQD18WaekBVPu31tedb8XB/c6fZ/NTN5+iT/ni374F8vwGq+L8ZU\n5F8Ggns+fCgYus1EYpJm4NMlqLANYsgi5Xem12Oaq1wuBfmPxN98OL5vP5FyNyMW\n/bS2hgHJokVuPid4+yuGSsu4zQgRted80+eYA1QzPAsoqlGGBVzFc/yktQKBgQDP\nRcqHPFV7Tfn+vkk8bEf4BR4KNKWJZXqeCONQSEboJM3axQ9njXN73iR5qRkW/Z99\nWwy6P/wAy1SIqEImf3y9v3tHI1BxIO4xKEr1EqjGarFqS9Rod0tACRc/cPwf6DZQ\n5R1+z3AyMiLFYOUnFZcOdGz9RmA5aeZ9XWuHSDWimQKBgGgmRWuGasEEMXdnkLQA\nrNg1Di5DFv+KvXwgTo63MxwBs2olQ7jUsFf8khipqpByGazYgGeEa1RxDGpQrdyO\nI/5N3d5VcGW4g9obfdexuuKOloyKRS2N0KNhLfEfb+qr4gRACPpyKnj5Jeohliox\nbHieUzx8qriZ8KrD3PbjKqap\n-----END PRIVATE KEY-----\n`\n\nnode = require('../../node')()\nnode.pid = 'hub'\nrequire('../../websocket-server.js')(node, certificate, private_key)\n"
  },
  {
    "path": "kernel/demos/sync9-chat/chat-server.js",
    "content": "var fs = require('fs')\nvar path = require('path')\nvar ws = require('ws')\nrequire('dotenv').config()\n\n// When we have the npm version, this can be improved\nvar lib_path = \"../../../\"\n\n// Bundler doesn't actually return anything, but calling it with require\n// generates the braid-bundle.js\nrequire(path.join(lib_path, './util/braid-bundler.js'))\nvar sqlite = require(path.join(lib_path, './kernel/sqlite-store.js'))\nvar store = require(path.join(lib_path, './kernel/store.js'))\nvar braid = require(path.join(lib_path, './kernel/node.js'))\nvar braid_websocket_server = require(path.join(lib_path, './kernel/websocket-server.js'))\nvar braid_http_server = require(path.join(lib_path, './kernel/http-server.js'))\nvar webpush = require(\"web-push\")\n\nif (process.env.MAIL_TO\n    && process.env.WEB_PUSH_PUBLIC\n    && process.env.WEB_PUSH_PRIVATE)\n    webpush.setVapidDetails(\n        process.env.MAIL_TO,  // Needs email address to send from\n        process.env.WEB_PUSH_PUBLIC,\n        process.env.WEB_PUSH_PRIVATE\n    )\n\nvar port = 3009\n\n// Static files to serve over HTTP\nvar known_files = {\n\t'/braid-bundle.js': {\n\t\tpath: path.join(lib_path, `/builds/braid-bundle.js`),\n\t\tmime: 'text/javascript'\n\t},\n\t'/braidchat': {\n\t\tpath: path.join('.', '/chat.html'),\n\t\tmime: 'text/html'\n\t},\n\t'/settings': {\n\t\tpath: path.join('.', '/settings.html'),\n\t\tmime: 'text/html'\n\t},\n\t'/chat.js': {\n\t\tpath: path.join('.', '/chat.js'),\n\t\tmime: 'text/javascript'\n\t},\n\t'/chat.css': {\n\t\tpath: path.join('.', '/chat.css'),\n\t\tmime: 'text/css'\n\t},\n\t'/mobile.css': {\n\t\tpath: path.join('.', '/mobile.css'),\n\t\tmime: 'text/css'\n\t},\n\t'/favicon.ico': {\n\t\tpath: path.join('.', '/favicon.ico'),\n\t\tmime: 'image/x-icon'\n\t},\n\t'/white-airplane.png': {\n\t\tpath: path.join('.', '/white-airplane.png'),\n\t\tmime: 'image/png'\n\t},\n\t'/black-airplane.png': {\n\t\tpath: path.join('.', '/black-airplane.png'),\n\t\tmime: 
'image/png'\n\t},\n\t'/settings.css': {\n\t\tpath: path.join('.', '/settings.css'),\n\t\tmime: 'text/css'\n\t},\n\t'/client.js': {\n\t\tpath: path.join('.', '/client.js'),\n\t\tmime: 'text/javascript'\n\t},\n\t'/worker.js': {\n\t\tpath: path.join('.', '/worker.js'),\n\t\tmime: 'text/javascript'\n\t},\n\t'/icon.png': {\n\t\tpath: path.join('.', '/icon.png'),\n\t\tmime: 'image/png'\n\t}\n}\n// Keys that braid knows about, and their default values.\nvar known_keys = {\n\t'/usr': {},\n\t'/chat': []\n}\n\nlet endpoints = [] //list of devices connected to webpush notifications\nlet last_sent = {}\n\nasync function get_body(req) {\n\tvar body = ''\n\tawait req.on('data', function(data) {\n\t\tbody += data\n\t\tconsole.log('Partial body: ' + body)\n\t})\n\treturn body\n}\n\n// A simple method to serve one of the known files\nasync function serve_file(req, res) {\n\tif (req.method == 'POST') {\n\t\tconsole.log('POST to: ' + req.url)\n\t\tlet body = await get_body(req)\n\t\tlet json_body = JSON.parse(body)\n\n\t\tif (req.url === '/subscribe') {\n\t\t\tif (!endpoints.includes(body)) {\n\t\t\t\tconsole.log(\"Adding new endpoint\")\n\t\t\t\tendpoints.push(body)\n\t\t\t}\n\t\t\tvar payload = JSON.stringify({ title: 'Test Notification on chat' })\n\t\t\t// Sends a test notification\n\t\t\twebpush\n\t\t\t\t.sendNotification(json_body, payload)\n\t\t\t\t.catch(err => console.error(err))\n\t\t} else if (req.url === '/token') {\n\t\t\tconsole.log(\"Saving token\")\n\t\t\tsave_token(json_body['token'])\n\t\t} else if (req.url === '/message') {\n\t\t\tconsole.log(\"New message (sent as post request)\")\n\t\t\tlet notifications = build_mobile_notifications('user', 'basic notification')\n\t\t\tsend_mobile_notifications(notifications)\n\t\t}\n\t\tres.writeHead(201, {'Content-Type': 'text/html'})\n\t\tres.end()\n\t} else {\n\t\tif (known_keys.hasOwnProperty(req.url))\n\t\t\treturn braid_callback(req, res)\n\t\tvar req_path = new URL(req.url, `http://${req.headers.host}`)\n\t\tvar f = 
known_files[req_path.pathname]\n\t\tif (f) {\n\t\t\tres.writeHead(200, headers = { 'content-type': f.mime })\n\t\t\tfs.createReadStream(f.path).pipe(res)\n\t\t} else {\n\t\t\tres.writeHead(404)\n\t\t\tres.end()\n\t\t}\n\t}\n}\n\n\nvar send_push_notifications = () => {\n\tlet send_to = []\n\tfor (let i = 0; i < endpoints.length; i++)\n\t  send_to.push(JSON.parse(endpoints[i]))\n\n\tvar payload = JSON.stringify({\n        title: 'New message on BraidChat',\n        click_action: 'https://invisible.college/chat/',\n        body: \"BraidChat\",\n        icon: \"https://ibb.co/p4wKfsR\"\n    })\n    console.log(\"Sending message: \" + JSON.stringify(payload));\n\n\tfor (let i = 0; i < send_to.length; i++) {\n\t  send_to[i]['click_action'] = 'https://invisible.college/chat/'\n\t  console.log(\"sending webpush to user\")\n\t  webpush\n\t\t.sendNotification(send_to[i], payload)\n\t\t.catch(err => console.error(err));\n\t}\n}\n\n\n\n// Create either an http or https server, depending on the existence of ssl certs\nvar server =\n    (fs.existsSync('certs/private-key') && fs.existsSync('certs/certificate'))\n    ? 
require('https').createServer(\n        { key: fs.readFileSync('certs/private-key'),\n\t\t  cert: fs.readFileSync('certs/certificate') },\n        serve_file)\n    : require('http').createServer(serve_file)\n\n// Setup the braid sqlite store at a local db\nvar db = sqlite('db.sqlite')\nvar node = braid({pid: 'server-' + Math.random().toString(36).slice(2,5)})\nnode.fissure_lifetime = 1000 * 60 * 60 * 24 // Fissures expire after 24 hours\n\nvar braid_callback = braid_http_server(node)\nstore(node, db).then(node => {\n\t// Unsubscribe on error\n\t// Maybe not needed\n\tnode.on_errors.push((key, origin) => node.unbind(key, origin))\n\n\t// For any of the default keys, if we have no versions for them, set an initial version.\n\tObject.keys(known_keys)\n\t\t.filter(k => Object.keys(node.resource_at(k).current_version).length == 0)\n\t\t.forEach(k => node.set(k, known_keys[k]))\n\tObject.keys(known_keys)\n\t\t.forEach(k => node.get(k))\n\n\tvar wss = new ws.Server({ server })\n\tbraid_websocket_server(node, { port, wss })\n\n\tconsole.log('Keys at startup: ' + JSON.stringify(Object.keys(node.resources)))\n\tserver.listen(port)\n\tconsole.log('Listening on port', port)\n})\n\n\n//App notifications\nvar notification_node = require(\"../../node.js\")()\nnotification_node.websocket_client({url:'wss://invisible.college:3009'})\nnotification_node.get('/usr', add_users)\nnotification_node.get('/chat', update_messages)\nvar { Expo } = require(\"expo-server-sdk\")\nlet expo = new Expo()\n\nfunction update_messages(new_val) {\n    let message = new_val[new_val.length -1]\n    console.log(JSON.stringify(message))\n    console.log(message['body'])\n    if (last_sent != message['body']) {\n\t    //web notifications\n\t    send_push_notifications()\n\t    //mobile notifications\n\t    let notifications = build_mobile_notifications(get_name(message), message['body'])\n\t    send_mobile_notifications(notifications)\n        last_sent = message['body']\n        console.log(\"Sent 
message\")\n    } else\n        console.log(\"Didn't send push notification:\" +  message['body'])\n}\n\nlet saved_users = {}\nfunction add_users(user_dict){\n\tsaved_users = JSON.parse(JSON.stringify(user_dict))   //new json object here\n}\n\nfunction get_name(message){\n\tlet name = saved_users[message['user']]\n\tif (name == undefined)\n\t\tname = \"unknown\"\n\telse\n\t\tname = name['displayname']\n\n\treturn name\n}\n\nlet saved_push_tokens = []\nfunction save_token(token) {\n\tconsole.log(token.value, saved_push_tokens)\n\tconsole.log(JSON.stringify(token))\n    var exists = saved_push_tokens.find(t => t === token.value)\n    if (!exists) {\n        console.log(\"new device saved for push notifications\")\n        saved_push_tokens.push(token.value)\n    } else\n      console.log(\"Device was already saved\")\n}\n\n//creates the mobile notifications. One for every device\nvar build_mobile_notifications = ( user, message ) => {\n    if (message === undefined) {\n        console.log(\"message is undefined\")\n        return undefined\n    }\n    console.log(\"Sending push notification\", {message, user},\n                \"to\", saved_push_tokens.length, 'devices.')\n    let notifications = []\n    let index = -1\n    for (let push_token of saved_push_tokens) {\n\t\tconsole.log(\"sending to device:\" + push_token)\n\t\tindex++\n\t\tif (!Expo.isExpoPushToken(push_token)) {\n\t\t    console.error(`Push token ${push_token} is not a valid Expo push token`)\n\t\t    continue\n\t\t}\n\t\tnotifications.push({\n\t\t\tto: push_token,\n\t\t\tsound: \"default\",\n\t\t\ttitle: user,\n\t\t\tbody: message,\n\t\t\tdata: { message }\n\t\t})\n\t}\n\treturn notifications\n}\n\n//Sends the notification list \nvar send_mobile_notifications = (notifications) => {\n    if (!notifications || notifications.length == 0) {\n\t    console.log(\"no devices linked\")\n\t    return\n    } else {\n        console.log(\"sending notifications:\" + JSON.stringify(notifications[0]))\n\n        
try {\n            var chunks = expo.chunkPushNotifications(notifications)\n        } catch (e) {\n            console.error('Cannot send push notification! Expo error:', e)\n        }\n\n        (async () => {\n            for (let chunk of chunks) {\n                try {\n                    var receipts = await expo.sendPushNotificationsAsync(chunk)\n                    console.log(receipts)\n                } catch (error) {\n                    console.log(\"Error: sendPushNotificationsAsync\")\n                    console.error(error)\n                }\n            }\n        })()\n    }\n}\n"
  },
  {
    "path": "kernel/demos/sync9-chat/chat.css",
    "content": "@import url('https://fonts.googleapis.com/css2?family=Recursive:wght@300;400&display=swap');\n\nbody {\n    padding: 0;\n    margin: 0;\n    width: 100vw;\n    height: 100vh;\n    box-sizing: border-box;\n}\n.grid-container {\n    width: 100%;\n    height: 100%;\n    display: grid;\n    font-family: sans-serif;\n    grid-template-columns: auto;\n    grid-template-rows: 40px auto 85px 1.5em;\n    grid-template-areas: \n    \"title\"\n    \"messages\"\n    \"input\"\n    \"typing\";\n}\n.grid-container > * {\n    padding: 0 10%;\n}\nheader {\n    grid-area: title;\n    display: flex;\n    box-shadow: 0 2px 1px 0 rgba(0, 0, 0, 0.2);\n    z-index: 1;\n}\n.title {\n    margin: 0 5px 0 2px;\n    user-select: none;\n    align-self: center;\n}\n#settings-hover-container {\n    display: flex;\n    flex-direction: row;\n    align-self: center;\n    margin: 0;\n    padding: 0;\n    user-select: none;\n}\n.settings {\n    display: inline-block;\n    height: 21px;\n    width: 21px;\n    text-align: center;\n    align-self: flex-end;\n    color: #666;\n    padding: 5px;\n}\n.settings-expand {\n    align-self: center;\n    font-family: sans-serif;\n    margin: 0 5px;\n    color: #666;\n}\n.settings-input-collapse {\n    transform-origin: left;\n    max-width: 20ch;\n    border: 1.5px solid #999;\n    border-radius: 5px;\n    padding: 2px 3px 1px 4px;\n    height: 1.5em;\n    margin-left: 0;\n}\n#stats {\n    font-size: 10px;\n    font-family: sans-serif, sans, helvetica, arial;\n    color: #444;\n    text-align: right;\n    position: fixed;\n    right: 50px;\n    top: 3px;\n}\n#react-messages {\n    grid-area: messages;\n    overflow-y: scroll;\n    overflow-x: hidden;\n}\n.messageBox {\n    display: flex;\n    flex-direction: column;\n    width: 100%;\n}\n.messageBox:empty::after {\n    content: \"It's too quiet in here...\";\n    font-style: italic;\n    color: #777;\n    font-size: 1.1875em;\n    padding: 4px 10% 4px 5px;\n    width: 100%;\n    text-align: 
center;\n}\n.messageBox .msg {\n    width: 100%;\n    white-space: pre-wrap;\n    display: inline-block;\n    padding: 4px 10% 4px 5px;\n}\n.messageBox .msg.msg-collapse {\n    padding-bottom: 2px;\n    padding-top: 0;\n}\n.messageBox .msg .user-id {\n    color: #356;\n    font-weight: 600;\n}\n.messageBox .msg .timestamp {\n    color: #666;\n    font-size: 12px;\n    margin-left: 5px;\n}\n.msg.live {\n    opacity: 50%;\n    user-select: none;\n}\n.input {\n    grid-area: input;\n    display: flex;\n    margin-top: -1px;\n    margin-bottom: 2px;\n    z-index: 1;\n}\n#send-box {\n    height: auto;\n    flex-grow: 1;\n    resize: none;\n    font-size: 16px;\n    font-family: sans-serif;\n    padding: 5px;\n    border-radius: 4px;\n}\n#send-msg {\n    display: inline-block;\n    width: 45px;\n    height: 45px;\n    border-radius: 100%;\n    line-height: 45px;\n    \n    text-align: center;\n    align-self: center;\n    margin: 6px;\n\n    cursor: pointer;\n\n    font-family: sans-serif;\n    color: white;\n    background-color: #48d;\n    border: 4px solid #7ac;\n    user-select: none;\n}\n/* Typing Indicators */\n#typing {\n    grid-area: typing;\n    user-select: none;\n}\n#typing.hidden {\n    display: none;\n}\n#typing-text {\n    line-height: 1.2em;\n    color: #666;\n    user-select: none;\n}\n@keyframes typing-dots {\n    from {\n        transform: translateY(0px) scale(1);\n        opacity: 40%;\n    }\n    to {\n        transform: translateY(-5px) scale(1.3);\n        opacity: 80%;\n    }\n    /*to: {\n        transform: translateY(0px) scale(1);\n        opacity: 40%;\n    }*/\n}\n.typing-anim span {\n    background-color: black;\n    display: inline-block;\n    width: 4px;\n    height: 4px;\n    vertical-align: middle;\n    border-radius: 2px;\n\n    animation-name: typing-dots;\n    animation-iteration-count: infinite;\n    animation-duration: 0.6s;\n    animation-direction: alternate;\n    animation-timing-function: 
cubic-bezier(1,-0.06,.87,1.13);\n}\n.typing-anim span:nth-child(2) {\n    animation-delay: 0.2s;\n}\n.typing-anim span:nth-child(3) {\n    animation-delay: 0.4s;\n}"
  },
  {
    "path": "kernel/demos/sync9-chat/chat.html",
    "content": "<!doctype html>\n<html>\n<head>\n   <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n   <meta charset=\"utf-8\">\n   <title>Braid Chat</title>\n   <link rel=\"stylesheet\" href=\"chat.css\" type=\"text/css\">\n   <link rel=\"stylesheet\" href=\"mobile.css\" type=\"text/css\">\n\n   <!-- Load libraries-->\n   <script type=\"module\">\n      import { openDB, deleteDB, wrap, unwrap } from 'https://unpkg.com/idb?module'\n  \n      window.idb = {\n         openDB, deleteDB, wrap, unwrap\n      }\n    </script>\n   <script src=\"braid-bundle.js\" type=\"text/javascript\"></script>\n   <script src=\"https://unpkg.com/react@16/umd/react.development.js\"></script> \n   <script src=\"https://unpkg.com/react-dom@16/umd/react-dom.development.js\"></script>\n   <script src=\"client.js\" type=\"text/javascript\"></script>\n</head>\n\n<body onload=\"screen_size()\" >\n   <span id=\"stats\"></span>\n   <div class=\"grid-container\" id=\"grid-container\">\n      <header>\n         <h2 class=\"title\" style=\"display:inline;\">Braid Chat</h2>\n         <div id=\"settings-hover-container\">\n            <span class=\"settings-expand\" id=\"username-btn\">Username:</span>\n            <input id=\"username-change\" class=\"settings-expand settings-input-collapse collapsed\"></input>\n            <a href=\"settings\"><img src =\"https://image.flaticon.com/icons/png/512/126/126472.png\" id = \"settings-icon\"></a>\n         </div>\n      </header>\n      <div id=\"react-messages\"></div>\n      <div class=\"input\">\n         <textarea id=\"send-box\" oninput=\"input_size()\"></textarea>\n         <span id=\"send-msg\"><img src=\"white-airplane.png\"></span>\n      </div>\n      <div id=\"typing\" class=\"hidden\">\n         <span class=\"typing-anim\">\n            <span></span>\n            <span></span>\n            <span></span>\n         </span>\n         <span id=\"typing-text\"></span>\n      </div>\n   </div>\n   <input id=\"expo-token\" 
style=\"display:none\"></input>\n   \n</body>\n<script src=\"chat.js\" type=\"text/javascript\"></script>\n</html>\n"
  },
  {
    "path": "kernel/demos/sync9-chat/chat.js",
    "content": "// Create a node\nvar browser_id = localStorage.browser_id || localStorage.browserId || 'B-' + Math.random().toString(36).slice(2)\nvar escaped_id = JSON.stringify(browser_id)\nvar use_leadertab = false\nvar use_invisible_server = false\nlocalStorage.browser_id = browser_id\n\nvar node\nif (!use_leadertab)\n    node = require('node.js')({\n        pid: (localStorage.username &&\n              localStorage.username + '-' + Math.random().toString(36).slice(2,6))\n    })\n\nprint_network = true;\ng_show_protocol_errors = true;\nvar params = new URLSearchParams(window.location.search);\nvar protocol = (params.get(\"protocol\") === 'http' ? 'http' : 'ws') + (window.location.protocol === 'https:' ? 's' : '')\nvar braid_url = `${protocol}://${window.location.host}/`\n\nconsole.log('protocol is ' + protocol)\n\nif (use_invisible_server)\n    braid_url = 'wss://invisible.college:3009/'\nif (!use_leadertab)\n    var socket = require(protocol == 'https' ? 'http-client-old.js' : 'websocket-client.js')({node, url: braid_url})\n\n\n// UI Code\nlet create_listeners = function () {\n    if (use_leadertab)\n        node = require('leadertab-shell.js')(braid_url)\n    node.fissure_lifetime = 1000 * 60 * 60 * 24 // Fissures expire after 24 hours\n\n    node.default('/chat', [])\n    node.default('/usr',  {})\n\n    // Local copy of variables\n    let users = {}\n    let messages = []\n    // How many milliseconds each keypress flags us as typing for\n    var typing_timeout = 30000\n    // How often to send live typing updates.\n    var live_type_update_freq = 50\n\n    // Subscribe for updates to a resource\n    node.get('/chat', update_messages)\n    node.get('/usr',  update_users)\n\n    window.addEventListener('beforeunload', function () {\n\t    set_not_typing()\n        node.forget('/chat', update_messages)\n        node.forget('/usr',  update_users)\n        node.close && node.close()\n    })\n    \n    //// ----- Messagebox rendering and interactability -----\n 
   var message_box = document.getElementById(\"react-messages\")\n    function render_username(user_id) {\n        return (user_id && users[user_id]) ? users[user_id].displayname : \"Anonymous\"\n    }\n\n    function format_header (msg) {\n        let timestamp = \"Live\"\n        if (msg.time) {\n            now = new Date()\n            msg_date = new Date(msg.time)\n            timestamp = now.getDate() === msg_date.getDate()\n                ? msg_date.toLocaleTimeString()\n                : msg_date.toLocaleDateString()\n        }\n\n        let username = render_username(msg.user)\n        return [React.createElement(\"span\", {className: \"user-id\", key:\"username\"}, username),\n                React.createElement(\"span\", {className: \"timestamp\", key: \"time\"}, timestamp)]\n    }\n    function format_message(msg, i, msgs, extra_classes) {\n        let collapse = i && (msgs[i-1].user == msg.user) && (msg.time - msgs[i-1].time < 1200000)\n        // Parse the message\n        let body = React.createElement(\"div\", {className: \"msg-body\", key: \"text\"}, msg.body)\n        let class_list = (extra_classes || []).concat(collapse ? 
[\"msg\"] : [\"msg\", \"msg-collapse\"]).join(' ')\n        if (collapse) {\n            return React.createElement('div', {className: class_list, key: i}, body)\n        } else {\n            let rendered_header = format_header(msg)\n            return React.createElement('div', {className: class_list, key: i},\n                [React.createElement(\"div\", {className: \"msg-header\", key: \"head\"}, rendered_header),\n                 body])\n        }\n    }\n    var typing_text_element = document.getElementById(\"typing-text\")\n    var typing_box = document.getElementById(\"typing\")\n    function draw_typing_indicator(names) {\n        var n = names.length\n        typing_box.classList.toggle(\"hidden\", n == 0)\n        let typing_names\n        switch (n) {\n            case 0:\n                return\n            case 1:\n                typing_names = names[0]\n                break\n            case 2: \n                typing_names = `${names[0]} and ${names[1]}`\n                break\n            case 3:\n            case 4:\n            case 5:\n                names[n-1] = 'and ' + names[n-1]\n                typing_names = names.join(\", \")\n                break\n            default:\n                typing_names = \"Several people\"\n        }\n        typing_text_element.textContent = `${typing_names} ${(n > 1) ? 
\"are\" : \"is\"} typing...`\n    }\n    function update_users (new_users) {\n        users = new_users\n        if (!users.hasOwnProperty(browser_id)) {\n            set_username(generate_username())\n            return\n        }\n        name_box.value = users[browser_id].displayname\n        update_messages(messages)\n    }\n    function update_messages(new_val) {\n        // Check scrolling \n        let should_scroll = true\n        let n_messages = messages.length\n        if (n_messages) {\n            let furthest_scroll = document.getElementsByClassName(\"msg\")[n_messages - 1].getBoundingClientRect().top\n            let box_bottom = message_box.getBoundingClientRect().bottom\n            // If the last message is off the screen, we shouldn't scroll\n            should_scroll = box_bottom > furthest_scroll\n        }\n        let message_elements = new_val.map(format_message)\n\n        var live_classes = [\"live\"]\n        Object.entries(users).forEach(user => {\n            if (user[1].typing && user[0] != browser_id) {\n                let msg = {user: user[0], body: user[1].typing}\n                message_elements.push(format_message(msg, null, null, live_classes))\n            }\n        })\n\n        let message_list = React.createElement('div', {className: \"messageBox\", key: \"messages\"}, message_elements)\n        ReactDOM.render(\n            message_list,\n            message_box,\n            () => {\n                if (should_scroll)\n                    message_box.scrollTop = message_box.scrollHeight - message_box.clientHeight\n            }\n        )\n        messages = new_val\n\n        // Update the typing indicator\n        let whos_typing = Object.entries(users)\n            .filter(user => user[1].typing && user[0] != browser_id)\n            .map(user => user[1].displayname)\n        draw_typing_indicator(whos_typing)\n    }\n    //// ---- Input field handlers ----\n    function reset_text(){\n        let grid_container = 
document.getElementById(\"grid-container\")\n        let header_size = 40\n        if (screen.width < 800)\n            header_size = '100'\n\n        grid_container.style.gridTemplateRows = `${header_size}px auto 85px 1.5em`\n\n        var message_view = document.getElementById(\"react-messages\")\n        message_view.scrollTop = message_view.scrollHeight\n    }\n    // Enable sending of messages\n    let sendbox = document.getElementById(\"send-box\")\n    function submit() {\n        if (!sendbox.value.length)\n            return\n        // Preprocess outgoing message\n        let send_time = new Date().getTime()\n        let message_body = JSON.stringify([{\n            user: browser_id,\n            time: send_time,\n            body: sendbox.value\n        }])\n        node.setPatch('/chat', `[-0:-0] = ${message_body}`)\n        reset_text()\n        sendbox.value = \"\"\n        // Remove typing indicator\n        set_not_typing()\n    }\n    \n    let typing_timeout_id\n    let typing = false\n    setInterval(update_typing, live_type_update_freq)\n    function set_typing(text) {\n        // Refresh the AFK timeout\n        typing = true\n        clearTimeout(typing_timeout_id)\n        typing_timeout_id = setTimeout(set_not_typing, typing_timeout)\n    }\n    function set_not_typing () {\n        if (!users.hasOwnProperty(browser_id))\n            return\n        if (users[browser_id].typing)\n            node.setPatch('/usr', `[${escaped_id}].typing = false`)\n        users[browser_id].typing = false\n        typing = false\n    }\n    function update_typing() {\n        if (!users.hasOwnProperty(browser_id))\n            return\n        let last_check = users[browser_id].typing\n        let check = sendbox.value\n        // If the user has changed the textbox since last tick, and the local\n        // UI typing hasn't timed out\n        if (typing && last_check != check) {\n            node.setPatch('/usr', `[${escaped_id}].typing = 
${JSON.stringify(check)}`)\n            users[browser_id].typing = check\n        }\n    }\n\n    document.getElementById(\"send-msg\").addEventListener(\"click\", submit)\n    sendbox.addEventListener(\"keydown\", e => {\n        if (e.keyCode == 13 && !e.shiftKey) {\n            e.preventDefault()\n            submit()\n        }\n    })\n    sendbox.addEventListener(\"input\", e => {\n        if (sendbox.value.length > 0)\n            set_typing()\n        else\n            set_not_typing()\n    })\n\n    // Username Changing\n    let name_box = document.getElementById(\"username-change\")\n\n    name_box.onchange = e => {\n        e.preventDefault()\n        let new_name = name_box.value.replace(/\\W/g, '')\n        // Change username\n        name_box.value = new_name\n        set_username(new_name)\n\n        let expo_token = document.getElementById(\"expo-token\")\n        if (expo_token.value !== \"\")\n            console.log(\"Mobile device found with expoToken:\" + expo_token.value)\n        else\n            console.log(\"Not using app\")\n    }\n\n    function generate_username () {\n        // Username generation stuff\n        var names = [\"Bob\", \"Alice\", \"Joe\", \"Fred\", \"Mary\", \"Linda\", \"Mike\", \"Greg\", \"Raf\"]\n        let name = names[Math.floor(Math.random() * names.length)]\n        let number = Math.floor(Math.random() * 1000)\n        return `${name}${number}`\n    }\n    function set_username (name) {\n        localStorage.username = name\n        let escaped_name = JSON.stringify(name)\n        var patch = users.hasOwnProperty(browser_id) \n            ? 
`[${escaped_id}].displayname = ${escaped_name}`\n            : `[${escaped_id}] = {\"displayname\": ${escaped_name}}`\n            \n        node.setPatch('/usr', patch)\n    }\n}\n\nif (document.readyState === \"complete\" ||\n    (document.readyState !== \"loading\" && !document.documentElement.doScroll))\n    create_listeners()\nelse\n    document.addEventListener(\"DOMContentLoaded\", create_listeners)\n\n// Update statistics ever N seconds\nfunction update_stats () {\n    var resource = node.resource_at('/usr')\n    var versions = node.versions('/usr')\n\n    // Compute how many versions are fully acknowledged\n    var acked = 0\n    versions.forEach(v => { if (!resource.acks_in_process[v]) acked++ })\n\n    // And count the fissures\n    var fissures           = node.fissures('/usr')\n    var unmatched_fissures = node.unmatched_fissures('/usr')\n\n    // Count how many obsolete versions are fizzed\n    var fizzed_vers = new Set([])\n    fissures.forEach(f => (f.versions || []).forEach(v => fizzed_vers.add(v)))\n    var obsoletes = 0\n    for (v of fizzed_vers)\n        if (!resource.time_dag[v])\n            obsoletes++\n\n    document.getElementById('stats').innerHTML =\n        `Acked Versions: ${acked}/${versions.length}<br>`\n        + `Unmatched Fissures: ${unmatched_fissures.length}/${fissures.length}`\n        + (obsoletes ? `<br>Obsolete Fizzed Versions: ${obsoletes}` : '')\n}\nnode.ons.push(() => setTimeout(update_stats))  // In a settimeout so it runs\nupdate_stats()                                 // after, not before processing\n                                               // the message\n"
  },
  {
    "path": "kernel/demos/sync9-chat/client.js",
    "content": "var public_vapid_key =\n  \"BB2ikt9eLJydNI-1LpnaRYiogis3ydcUEw6O615fhaHsOsRRHcMZUfVSTNqun6HVb44M6PdfviDJkMWsdTO7XcM\"\n\n\nasync function update_web_slider() {\n    console.log(\"update_web_slider\")\n    if (document.getElementById(\"web_slider\").checked)\n        await subscribe()\n    else\n        await unsubscribe()\n}\n\nasync function send_notification() {\n    await fetch(\"/message\", {\n        method: \"POST\",\n        body: JSON.stringify({\n            message: {\n                user: \"User\",\n                text:\"Message\"\n            }\n        }),\n        headers: {\n          \"content-type\": \"application/json\"\n        }\n    });\n}\n\n// Subscibes the user and sends a test notification\nasync function subscribe() {\n  var subscription_str = await get_subscription_string()\n  \n  // Send Push Notification\n  console.log(\"Sending Push...\" + subscription_str)\n  await fetch(\"/subscribe\", {\n    method: \"POST\",\n    body: subscription_str,\n    headers: { \"content-type\": \"application/json\" }\n  })\n  console.log(\"Push Sent...\")\n}\n\n// Returns a token for sending notifications to client\nasync function get_subscription_string () {\n  console.log(\"Registering service worker...\")\n  var register = await navigator.serviceWorker.register(\"worker.js\", {\n    scope: \"/chat/\"\n  })\n  console.log(\"Service Worker Registered...\")\n  console.log(\"Registering Push...\")\n  var subscription = await register.pushManager.subscribe({\n    userVisibleOnly: true,\n    applicationServerKey: url_base64_to_uint8_array(public_vapid_key)\n  })\n  console.log(\"Push Registered...\")\n  return JSON.stringify(subscription)\n}\n\n\n// Sends a server token, and tells it to remove from batch\nasync function unsubscribe() {\n  console.log(\"remove()\")\n  var subscription_str = await get_subscription_string()\n  // Send Push Notification\n  console.log(\"Sending Push for removal...\")\n  await fetch(\"/chat/unsubscribe\", {\n    
method: \"POST\",\n    body: subscription_str,\n    headers: {\n      \"content-type\": \"application/json\"\n    }\n  })\n  console.log(\"Push Sent to remove user from list...\")\n}\n\nfunction url_base64_to_uint8_array(base64_string) {\n  var padding = \"=\".repeat((4 - base64_string.length % 4) % 4);\n  var base64 = (base64_string + padding)\n    .replace(/\\-/g, \"+\")\n    .replace(/_/g, \"/\");\n\n  var raw_data = window.atob(base64)\n  var output_array = new Uint8Array(raw_data.length)\n\n  for (let i=0; i < raw_data.length; ++i) \n    output_array[i] = raw_data.charCodeAt(i)\n\n  return output_array\n}\n\n// handles the size of input\nfunction input_size () {\n    let textarea = document.getElementById(\"send-box\")\n    let header_size = '40'\n    var ta_line_height = 45\n    if (screen.width < 800) {\n        header_size = '100'\n        ta_line_height = 45\n    }\n\n    let text_area_height = 85\n    let grid_container = document.getElementById(\"grid-container\")\n    grid_container.style.gridTemplateRows = `${header_size}px auto 85px 1.5em`\n    var ta_height = textarea.scrollHeight // Get the scroll height of the textarea\n    textarea.style.height = ta_height\n    var number_of_lines = Math.floor(ta_height/ta_line_height)\n    if (number_of_lines == 1)\n        grid_container.style.gridTemplateRows = `${header_size}px auto 85px 1.5em`\n    else if (number_of_lines == 2)\n        grid_container.style.gridTemplateRows = `${header_size}px auto 125px 1.5em`\n    else if (number_of_lines == 3)\n        grid_container.style.gridTemplateRows = `${header_size}px auto 175px 1.5em`\n    else if (number_of_lines >= 4)\n        grid_container.style.gridTemplateRows = `${header_size}px auto 220px 1.5em`\n\n    var message_view = document.getElementById(\"react-messages\")\n    message_view.scrollTop = message_view.scrollHeight\n}\n\n// If safari mobile, then the screen needs to be cut at the bottom\nfunction screen_size () {\n    if (screen.width < 800) {\n       
 var ua = navigator.userAgent.toLowerCase()\n        if (ua.indexOf('safari') !== -1) {\n            if (ua.indexOf('chrome') > -1) {\n                // Chrome\n            } else {\n                console.log(\"safari mobile\")\n                document.body.style.height = '90vh'\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "kernel/demos/sync9-chat/mobile.css",
    "content": "#send-msg img{   \n    width:65px;\n    height:65px;\n    \n}\n#send-msg {\n    position:absolute;\n    right:13%;\n    background-color:rgb(255,255,255,0);\n    border-radius: 0%;\n    border: 0px solid #fff;\n    margin-left:-100px;\n    margin-top:-20px;\n}\n#send-box {\n    padding-right:110px;\n    padding-left:5px;\n}\n\n#settings-icon {\n    position:absolute;\n    right:5px;\n    /* top:5px; */\n    width:30px;\n    height:30px;\n}\n/* css for phones */\n@media only screen and (max-device-width: 640px) {\n    .grid-container {\n        width: 100%;\n        height: 100%;\n        display: grid;\n        font-family: sans-serif;\n    \n        grid-template-columns: auto;\n        grid-template-rows: 100px auto 85px 1.5em;\n        grid-template-areas: \n        \"title\"\n        \"messages\"\n        \"input\"\n        \"typing\";\n    }\n    .grid-container > * {\n        padding: 0 5%;\n    }\n    /* Header size */\n    header {\n        font-size: 40px;\n        padding:30px;\n        height:100px;\n    }\n\n    input {\n        width:200px;\n        padding:15px;\n        font-size:27px;\n        font-family: \"Times New Roman\", Times, serif;\n        font-family: sans-serif;\n    }\n    #settings-hover-container {\n       margin-left:20px;\n    }\n    #settings-icon {\n        float:right;\n        right:14px;\n        width:45px;\n        height:45px;\n    }\n    /* Messages */\n    .messageBox .msg .user-id {\n        font-weight: 600;\n        font-size:35px;\n    }\n    .messageBox .msg .timestamp {\n        font-size: 16px;\n        margin-left: 10px;\n    }\n    .messageBox .msg {\n        font-size: 32px;\n        margin-left: 5px;\n    }\n    .messageBox {\n        margin-bottom:15px;\n    }\n    /* Input and send */\n    #send-box {\n        font-family: \"Times New Roman\", Times, serif;\n        font-family: sans-serif;\n        height: auto;\n        flex-grow: 3;\n        font-size: 45px;\n        /* padding: 20px; */\n   
     padding-right:120px;\n        padding-left:15px;\n        line-height:45px;\n        padding-top:17.5px;\n        padding-bottom:0px;\n        margin-bottom:0;\n    }\n\n    #send-msg { \n        position:absolute;\n        right:10%;\n        display: inline-block;\n        background-color:rgb(255,255,255,0);\n        border-radius: 0%;\n        border: 0px solid #fff;\n        \n        text-align: center;\n        align-self: center;\n        margin: 6px;\n\n        cursor: pointer;\n        user-select: none;\n        margin-top:-15px;\n        /* margin-left:-450px; */\n    }\n    #send-msg img{  \n        /* for the white airplane */\n        width:70px;\n        height:65px;\n       \n        /*for the black airplane*/\n        /* width:85px;\n        height:85px;\n        margin-left:13px;\n        margin-top:-3px;  */\n    }\n}"
  },
  {
    "path": "kernel/demos/sync9-chat/package.json",
    "content": "{\n  \"name\": \"sync9-chat\",\n  \"version\": \"0.0.1\",\n  \"description\": \"\",\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org\",\n  \"main\": \"server.js\",\n  \"dependencies\": {\n    \"better-sqlite3\": \"^5.4.3\",\n    \"dotenv\": \"^8.2.0\",\n    \"expo-server-sdk\": \"^3.5.1\",\n    \"express\": \"^4.17.1\",\n    \"idb\": \"^5.0.4\",\n    \"web-push\": \"^3.4.4\"\n  }\n}\n"
  },
  {
    "path": "kernel/demos/sync9-chat/settings.css",
    "content": "body {\n  font-family: sans-serif;\n  padding:10%;\n  text-align:center;\n}\n\n#home-icon {\n  display:inline;\n  width:30px;\n  height:30px;\n}\n\n.switch {\n  position: relative;\n  display: inline-block;\n  width: 30px;\n  height: 17px;\n}\n\n.switch input { \n  opacity: 0;\n  width: 0;\n  height: 0;\n}\n\n.slider {\n  position: absolute;\n  cursor: pointer;\n  top: 0;\n  left: 0;\n  right: 0;\n  bottom: 0;\n  background-color: #ccc;\n  -webkit-transition: .4s;\n  transition: .4s;\n}\n\n.slider:before {\n  position: absolute;\n  content: \"\";\n  height: 13px;\n  width: 13px;\n  left: 2px;\n  bottom: 2px;\n  background-color: white;\n  -webkit-transition: .4s;\n  transition: .4s;\n}\n\ninput:checked + .slider {\n  background-color: #2196F3;\n}\n\ninput:focus + .slider {\n  box-shadow: 0 0 1px #2196F3;\n}\n\ninput:checked + .slider:before {\n  -webkit-transform: translateX(13px);\n  -ms-transform: translateX(13px);\n  transform: translateX(13px);\n}\n\n/* Rounded sliders */\n.slider.round {\n  border-radius: 17px;\n}\n\n.slider.round:before {\n  border-radius: 50%;\n}"
  },
  {
    "path": "kernel/demos/sync9-chat/settings.html",
    "content": "<!DOCTYPE html>\n<html lang=\"en\">\n\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n  <meta http-equiv=\"X-UA-Compatible\" content=\"ie=edge\">\n  <title>BraidChat Settings</title>\n  <link rel=\"stylesheet\" href=\"settings.css\" type=\"text/css\">\n  <script src=\"client.js\"></script>\n</head>\n\n<body>\n  <h1>BraidChat Settings</h1>\n  <a href=\"/braidchat\"><img src = \"https://image.flaticon.com/icons/svg/25/25694.svg\" id = \"home-icon\"> </a><br><br>\n</script>\n  Web Notifications: \n  <label class=\"switch\">\n      <input type=\"checkbox\" onchange=\"update_web_slider()\" id=\"web_slider\">\n      <span class=\"slider round\"></span>\n    </label>\n    <br><br>\n    Test mobile: \n    <label class=\"switch\">\n        <input type=\"checkbox\" onchange=\"send_notification()\" id=\"mobile_web_slider\">\n        <span class=\"slider round\"></span>\n      </label>\n      <br>\n</body>\n\n</html>\n"
  },
  {
    "path": "kernel/demos/sync9-chat/worker.js",
    "content": "console.log(\"Service Worker Loaded...\");\n\nself.addEventListener(\"push\", e => {\n  const data = e.data.json();\n  console.log(\"Push Recieved...\");\n  self.registration.showNotification(data.title, {\n    body: \"Notified by braid\",\n    icon: \"/icon.png\"\n  });\n});\n"
  },
  {
    "path": "kernel/demos/wiki/wiki-client.html",
    "content": "<script src=\"/braid-bundle.js\"></script>\n<script src=\"https://invisible.college/js/marked.min.js\"></script>\n\n<link rel=\"stylesheet\" href=\"https://invisible.college/css/github-markdown.css\">\n\n<body></body>\n<script>\n\ndocument.body.style.border = '3px solid transparent'\nvar real_errorrr_happened = false\nwindow.onerror = function (e) {\n    real_errorrr_happened = true\n    document.body.style.border = '4px red solid'\n}\n\nvar output = document.createElement('div')\noutput.className = 'pad'\noutput.style.maxWidth = '900px'\ndocument.body.append(output)\n\nvar bottom_pad = document.createElement('div')\nbottom_pad.style.height = '50vh'\nbottom_pad.style.display = 'none'\ndocument.body.append(bottom_pad)\n\nvar t = document.createElement('textarea')\nt.style.position = 'fixed'\nt.style.bottom = '0px'\nt.style.right = '0px'\nt.style.width = '100%'\nt.style.height = '50vh'\nt.style.display = 'none'\nt.style.fontSize = '15px'\nt.style.fontFamily = 'helvetica, monaco, lucida grande, avenir'\ndocument.body.append(t)\n\nvar c = document.createElement('canvas')\nc.style.position = 'fixed'\nc.style.top = '0px'\nc.style.left = '0px'\nc.style.width = '100%'\nc.style.height = '50vh'\nc.style.display = 'none'\ndocument.body.append(c)\nvar g = c.getContext('2d')\n\nvar edit = document.createElement('div')\nedit.style.position = 'fixed'\nedit.style.bottom = '0px'\nedit.style.right = '0px'\nedit.style.padding = '30px'\nedit.style.cursor = 'pointer'\nedit.style.textDecoration = 'underline'\nedit.style.backgroundColor = 'rgba(255, 255, 255, .5)'\nedit.onclick = toggle_editor\nedit.innerText = 'edit'\ndocument.body.append(edit)\n\nvar stats = document.createElement('div')\nstats.style.position = 'fixed'\nstats.style.top = '0px'\nstats.style.right = '0px'\nstats.style.padding = '30px'\nstats.style.backgroundColor = 'rgba(255, 255, 255, .5)'\nstats.onclick = toggle_editor\nstats.innerText = '_'\ndocument.body.append(stats)\n\nvar differ = 
document.createElement('div')\ndiffer.style.position = 'fixed'\ndiffer.style.bottom = '0px'\ndiffer.style.right = '30px'\ndiffer.style.fontSize = '10px'\ndiffer.style.cursor = 'pointer'\ndiffer.style.textDecoration = 'underline'\ndiffer.style.backgroundColor = 'rgba(100, 100, 100, .2)'\ndiffer.onclick = (e) => {showing_diff=!showing_diff; update_markdown()}\ndiffer.innerText = 'show diffs'\n//document.body.append(differ)\n\nvar timer\nvar render_delay = 100\nfunction update_markdown_later() {\n    if (timer) clearTimeout(timer)\n    timer = setTimeout(update_markdown, render_delay)\n}\nfunction update_markdown() {\n    timer = null\n    var source = showing_diff ? html_diffed(last_version||'', t.value) : t.value\n    output.innerHTML = marked(source, {sanitize: !is_safe})\n    document.body.className = 'nopad'\n}\nupdate_markdown()\n\nvar is_safe = window.location.href.match(/^https?:\\/\\/wiki\\./)\nvar page_key = is_safe ? '/wiki' + window.location.pathname : window.location.pathname\n\nvar vert = true, editing = false, visualizing = false, update_vis = null\nfunction render () {\n    t.style.display = editing ? null : 'none'\n    bottom_pad.style.display = (editing && vert) ? null : 'none'\n    c.style.display = visualizing ? null : 'none'\n\n    if (vert) {\n        t.style.width = '100%'\n        t.style.height = '50vh'\n        output.style.width = null\n        c.style.width = '100%'\n        c.style.height = '50vh'\n    } else {\n        t.style.width = '45vw'\n        t.style.height = '100%'\n        output.style.width = editing ? 
'55vw' : null\n        c.style.width = '55vw'\n        c.style.height = '100%'\n    }\n\n    c.width = c.offsetWidth * devicePixelRatio\n    c.height = c.offsetHeight * devicePixelRatio\n    update_vis && update_vis()\n}\n\nwindow.addEventListener('resize', () => {\n    c.width = c.offsetWidth * devicePixelRatio\n    c.height = c.offsetHeight * devicePixelRatio\n    update_vis && update_vis()\n})\n\nfunction update_stats() {\n    stats.innerText = 'Time is length: ' + \n        Object.keys(node.resource_at(page_key).time_dag).length +\n        ' or ' + (JSON.stringify(node).length / t.value.length).toFixed(2) + ' bytes/byte'\n}\n\nvar first_time = true\nfunction toggle_editor () {\n    editing = !editing\n    render()\n    if (editing) t.focus()\n    if (editing && first_time) {\n        first_time = false\n        t.setSelectionRange(0,0)\n        t.scrollTop = 0\n    }\n    update_markdown()\n}\n\ndocument.body.onkeydown = function (e) {\n    var mods = 0\n    for (k in {ctrlKey:1, shiftKey:1, altKey:1, metaKey:1})\n        if (e[k]) mods += 1\n\n    if (e.keyCode == 27\n        //|| (mods >= 2 && e.keyCode == 32)\n        ) {\n        e.stopPropagation()\n        toggle_editor()\n    }\n}\n\nwindow.onresize = function () {\n    var w = window.innerWidth, h = window.innerHeight\n    if (w < 1200 !== vert) {\n        vert = !vert\n        render()\n    }\n}\nonresize()\nrender()\n\nfunction handle_pasted_images (el, cb) {\n    el.addEventListener(\"paste\", function(e) {\n        // 1. Let's look for an image in the clipboard data\n        if (!e.clipboardData || !e.clipboardData.items) return\n        var items = e.clipboardData.items\n        var blob\n        for (var i=0; i<items.length; i++) {\n            if (items[i].type.indexOf(\"image\") === -1) continue\n            blob = items[i].getAsFile()\n        }\n        if (!blob) return\n\n        // 2. Now we have the pasted image as a blob.  
Let's convert it to a data: url\n        var reader = new FileReader()\n        reader.addEventListener('load', ()=> {cb(reader.result)}, false)\n        reader.readAsDataURL(blob)\n    }, false)\n}\nhandle_pasted_images(window, x => {\n    insert_at_cursor(t, '<img src=\"'+x+'\">')\n})\n\nfunction insert_at_cursor (textarea, string) {\n    // IE support\n    if (document.selection) {\n        textarea.focus()\n        sel = document.selection.createRange()\n        sel.text = string\n    }\n    // Mozilla and others\n    else if (textarea.selectionStart || textarea.selectionStart == '0') {\n        var startPos = textarea.selectionStart\n        var endPos = textarea.selectionEnd\n        textarea.value = textarea.value.substring(0, startPos)\n            + string\n            + textarea.value.substring(endPos, textarea.value.length)\n    } else\n        textarea.value += string;\n}\n\nvar last_version\nvar showing_diff = false\naddEventListener('keypress', function (e) {\n    if (e.ctrlKey && e.key === 's') {\n        last_version = t.value\n        event.preventDefault()\n        update_markdown()\n    }\n})\n\ncolor = (s, c) =>\n    s.split('\\n\\n').map(x => '<span style=\"background-color: '+ c +'\">'+ x +'</span>').join('\\n\\n')\n\ngreen = (s) => color (s, '#cfc')\nred   = (s) => color (s, '#fcc')\nfunction html_diffed (Old, New) {\n    var diff = diff_main(Old, New)\n    var html = diff.map(\n        (x) => x[0] == 1 ? green(x[1])\n            : x[0] == -1 ? 
red(x[1])\n            : x[1]\n    ).join('')\n    //console.log(JSON.stringify(marked(html, {sanitize:false})))\n    return html\n}\n\n\nvar ting = null\nfunction scroll () {\n    // We only scroll to the ting once -- if it's fresh\n    if (ting || location.hash.length === 0) return\n\n    ting = document.getElementById(location.hash.substr(1))\n    ting && ting.scrollIntoView()\n}\nfor (i=0; i<50; i++)\n    setTimeout(scroll, i / 5.0 * 1000)\n\nt.addEventListener('input', e => {\n    update_markdown_later()\n})\n\nsync_editor(t)\nasync function sync_editor(t) {\n    var prev_text = ''\n    var node = window.node = require('./node.js')({pid: 'C-' + Math.random().toString(36).slice(2, 12)})\n    node.fissure_lifetime = 1000*60*60*8 // 8 hours\n    node.default(page_key, {cursors: {[node.pid]: {start: 0, end: 0, time: Date.now()}}, text: ''})\n    var ws_client = require('./websocket-client.js')({node, url: 'ws://localhost:3007/'})\n    // var ws_client = require('./websocket-client.js')({node})\n\n    var cursor_lifetime = 1000*60*60*24*7 // week\n\n    var ready = false\n    var setting = 0\n    function send_diff(from, to) {\n        if (!ready) return\n        setting++\n        node.set(page_key, null, diff_convert_to_my_format(diff_main(from, to)).map(x =>\n            `.text[${x[0]}:${x[0] + x[1]}] = ${JSON.stringify(x[2])}`\n        ))\n        setting--\n    }\n\n    function send_cursor_update(start, end) {\n        if (!ready) return\n        setting++\n        node.set(page_key, null, [\n            `.cursors[${JSON.stringify(node.pid)}] = ${JSON.stringify({start: {type: 'location', path: `.text[${start}]`}, end: {type: 'location', path: `.text[${end}]`}, time: Date.now()})}`])\n        setting--\n    }\n\n    var cb = x => {\n        ready = true\n        if (setting) return\n        console.assert(node.resource_at(page_key).weve_been_welcomed === true)\n        prev_text = t.value = x.text\n\n        var sel = x.cursors[node.pid]\n        if (sel) 
t.setSelectionRange(sel.start, sel.end)\n        else t.setSelectionRange(0, 0)\n\n        update_markdown_later()\n    }\n    node.get(page_key, cb)\n\n    node.ons.push((method, arg) => {\n        if (method != 'welcome' && method != 'fissure') return\n        if (arg.key != page_key) return\n\n        var fs = {}\n        if (method == 'welcome') {\n            for (let f of arg.fissures)\n                fs[`${f.a}:${f.b}:${f.conn}`] = f\n        } else {\n            let f = arg.fissure\n            fs[`${f.a}:${f.b}:${f.conn}`] = f\n        }\n\n        var rest = () => {\n            var o = node.resource_at(page_key).mergeable.read()\n            if (!o || !o.cursors) return\n\n            Object.assign(fs, node.resource_at(page_key).fissures)\n    \n            var delete_us = {}\n            Object.values(fs).forEach(f => {\n                if (!fs[`${f.b}:${f.a}:${f.conn}`]) {\n                    if (o.cursors[f.b]) delete_us[f.b] = true\n                }\n            })\n\n            var now = Date.now()\n            Object.entries(o.cursors).forEach(([k, v]) => {\n                if (k != node.pid && v.time <= now - cursor_lifetime) delete_us[k] = true\n            })\n\n            var patches = Object.keys(delete_us).map(k => `delete .cursors[${JSON.stringify(k)}]`)\n            if (patches.length) node.set(page_key, null, patches)\n        }\n        if (method == 'welcome') setTimeout(rest, 0)\n        else rest()\n    })    \n\n    node.on_errors.push((key, origin) => {\n        prev_text = t.value = ''\n        t.setSelectionRange(0, 0)\n\n        delete node.resources[key]\n        node.unbind(key, origin)\n\n        var subscribe = ws_client.pipe.subscribed_keys[key].we_requested\n        delete ws_client.pipe.subscribed_keys[key].we_requested\n\n        ws_client.pipe.send({key, subscribe, method: 'get'})\n    })\n\n    t.addEventListener('input', e => {\n        var selectionStart = t.selectionStart\n        var selectionEnd = 
t.selectionEnd\n        send_diff(prev_text, t.value)\n        send_cursor_update(selectionStart, selectionEnd)\n        prev_text = t.value\n\n        update_stats()\n    })\n    add_selection_listener(t, send_cursor_update)\n\n    window.addEventListener('beforeunload', function () {\n        setting++\n        if (ready) node.set(page_key, null, [`delete .cursors[${JSON.stringify(node.pid)}]`])\n        node.forget(page_key, cb)\n        setting--\n    })\n\n    stats.onclick = () => {\n        visualizing = !visualizing\n        render()\n    }\n\n    update_vis = () => {\n        if (visualizing) {\n            draw_resource(c, g, window.node.resource_at(page_key))\n        }\n        update_stats()\n    }\n\n    var prev_timer = 0\n    window.node.ons.push(() => {\n        if (prev_timer) clearTimeout(prev_timer)\n        prev_timer = setTimeout(update_vis, 0)\n    })\n}\n\nfunction add_selection_listener(t, cb) {\n    var prev_start = t.selectionStart\n    var prev_end = t.selectionEnd\n    \n    function handler() {\n        if (t.selectionStart != prev_start || t.selectionEnd != prev_end) cb(t.selectionStart, t.selectionEnd)\n        prev_start = t.selectionStart\n        prev_end = t.selectionEnd\n    }\n    \n    t.addEventListener('mousedown', e => setTimeout(handler, 0))\n    t.addEventListener('mouseup', e => setTimeout(handler, 0))\n    t.addEventListener('mousemove', e => setTimeout(handler, 0))\n    t.addEventListener('input', handler)\n    t.addEventListener('keydown', e => setTimeout(handler, 0))\n}\n\nvar tau = Math.PI * 2\n\nfunction draw_resource(c, g, res) {\n    g.fillStyle = 'white'\n    g.fillRect(0, 0, c.width, c.height)\n\n    // g.fillStyle = 'black'\n    // draw_text(c, g, 'fissures', 0, 10)\n    // g.fillStyle = 'lightgrey'\n    // g.fillRect(0, 20, 200, 200)\n    // draw_fissure_dag(c, g, res.fissures, 0, 20, 200, 200, 10)\n    \n    g.fillStyle = 'black'\n    draw_text(c, g, 'time dag', 0, 10)\n    draw_time_dag(c, g, res, 0, 20, 
200, 10)\n    \n    g.fillStyle = 'black'\n    draw_text(c, g, 'space dag', 250, 10)\n    \n    var x = {elems : '', deleted_by: {}, nexts: []}\n    if (res.space_dag) {\n        if (res.space_dag.t == 'lit') x = {elems : res.space_dag.S.text, deleted_by: {}, nexts: []}\n        else {\n            x = space_dag_get(res.space_dag.S, 0)\n            if (typeof(x.S.text) == 'string') x = {elems : x.S.text, deleted_by: {}, nexts: []}\n            else x = x.S.text.S.elems[0].S\n        }\n    }\n    draw_space_dag(c, g, x, 250, 30)\n}\n\nfunction draw_text(c, g, text, x, y, color, x_align, y_align, font) {\n    g.font = font || '15px Arial'\n    if (color) g.fillStyle = color\n    g.textAlign = x_align || 'left'\n    g.textBaseline = y_align || 'middle'\n    g.fillText(text, x, y)\n}\n\nfunction draw_fissure_dag(c, g, fissures, x, y, w, h, r) {\n    var fs = {}\n    Object.values(fissures).forEach(f => {\n        var ff = fs[f.conn]\n        if (!ff) {\n            var rand = Math.create_rand(f.conn)\n            ff = fs[f.conn] = {\n                id: f.conn,\n                color: '#' + rand().toString(16).slice(2, 8),\n                radius: r * (1 + rand()),\n                parents: {}\n            }\n        }\n        if (f.a < f.b) ff.has_side_a = true\n        if (f.b < f.a) ff.has_side_b = true\n        \n        Object.keys(f.parents).forEach(p => {\n            if (!fissures[p]) {\n                ff.has_issue = true\n                return\n            }\n            ff.parents[fissures[p].conn] = true\n        })\n    })\n    \n    function get_layer(k) {\n        if (fs[k].layer) return fs[k].layer\n        return fs[k].layer = Object.keys(fs[k].parents).reduce((x, p) => {\n            return Math.max(x, get_layer(p) + 1)\n        }, 0)\n    }\n    Object.keys(fs).forEach(get_layer)\n    \n    var layer_members = {}\n    var num_layers = 0\n    Object.values(fs).forEach(f => {\n        layer_members[f.layer] = layer_members[f.layer] || []\n        
layer_members[f.layer].push(f.id)\n        \n        if (f.layer >= num_layers) num_layers = f.layer + 1\n    })\n    \n    Object.values(layer_members).forEach(layer => {\n        layer.sort().forEach((k, i) => {\n            fs[k].layer_i = i\n        })\n    })\n\n    function get_node_pos(f) {\n        var layer_count = layer_members[f.layer].length\n        return [\n            lerp(0, x + r, layer_count, x + w - r, f.layer_i + 0.5),\n            y + r + (f.layer * r*4)\n        ]\n    }\n\n    Object.values(fs).forEach(f => {\n        var a = get_node_pos(f)\n        g.beginPath()\n        Object.keys(f.parents).map(x => fs[x]).forEach(p => {\n            var b = get_node_pos(p)\n            g.moveTo(a[0], a[1])\n            g.lineTo(b[0], b[1])\n        })\n        g.lineWidth = 3\n        g.strokeStyle = 'lightblue'\n        g.stroke()\n    })\n    \n    Object.values(fs).forEach(f => {\n        var node_pos = get_node_pos(f)\n        \n        var color = f.color\n        var rr = f.radius\n        \n        g.beginPath()\n        g.arc(node_pos[0], node_pos[1], rr, 0, tau)\n        g.fillStyle = f.has_issue ? 
'red' : 'white'\n        g.fill()\n        \n        g.beginPath()\n        if (f.has_side_a) {\n            g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4)\n        }\n        if (f.has_side_b) {\n            g.arc(node_pos[0], node_pos[1], rr, tau*3/4, tau/4)\n        }\n        g.strokeStyle = color\n        g.lineWidth = 2\n        g.stroke()\n    })\n}\n\nfunction draw_time_dag(c, g, res, x, y, w, r) {\n    g.lineWidth = 3\n    \n    var vs = {}\n    function get_layer(v) {\n        if (!vs[v]) vs[v] = {vid: v}\n        if (vs[v].layer) return vs[v].layer\n        return vs[v].layer = Object.keys(res.time_dag[v]).reduce((x, p) => {\n            return Math.max(x, get_layer(p) + 1)\n        }, 0)\n    }\n    Object.keys(res.time_dag).forEach(get_layer)\n    \n    var layer_members = {}\n    var num_layers = 0\n    Object.values(vs).forEach(v => {\n        layer_members[v.layer] = layer_members[v.layer] || []\n        layer_members[v.layer].push(v.vid)\n        \n        if (v.layer >= num_layers) num_layers = v.layer + 1\n    })\n    \n    Object.values(layer_members).forEach(layer => {\n        layer.sort().forEach((v, i) => {\n            vs[v].layer_i = i\n        })\n    })\n\n    function get_node_pos(v) {\n        var layer_count = layer_members[v.layer].length\n        return [\n            lerp(0, x + r, layer_count + 1, x + w - r, v.layer_i + 1),\n            y + r + (v.layer * r*3)\n        ]\n    }\n\n    Object.entries(vs).forEach(e => {\n        var a_pos = get_node_pos(e[1])\n        g.beginPath()\n        Object.keys(res.time_dag[e[0]]).forEach(p => {\n            g.moveTo(a_pos[0], a_pos[1])\n            \n            var b_pos = get_node_pos(vs[p])\n            g.lineTo(b_pos[0], b_pos[1])\n        })\n        g.strokeStyle = 'lightblue'\n        g.stroke()\n    })\n    \n    var fully_acked = {}\n    function mark_fully_acked_rec(v) {\n        if (!fully_acked[v]) {\n            fully_acked[v] = true\n            
Object.keys(res.time_dag[v]).forEach(mark_fully_acked_rec)\n        }\n    }\n    Object.keys(res.acked_boundary).forEach(mark_fully_acked_rec)\n\n    Object.entries(vs).forEach(e => {\n        var node_pos = get_node_pos(e[1])\n        \n        g.beginPath()\n        g.arc(node_pos[0], node_pos[1], r, 0, tau)\n        g.fillStyle = 'white'\n        g.fill()\n        \n        if (res.acks_in_process[e[0]]) {\n            var current_count = Math.max(0, res.acks_in_process[e[0]].count)\n            var max_count = current_count + 1\n            \n            var percent_done = (max_count - current_count) / max_count\n            if (percent_done > 0) {\n                g.beginPath()\n                g.arc(node_pos[0], node_pos[1], r, 0, tau/2, true)\n                if (percent_done == 1) {\n                    g.arc(node_pos[0], node_pos[1], r, tau/2, 0, true)\n                } else if (percent_done < 0.5) {\n                    var x = lerp(0, r, 0.5, 0, percent_done)\n                    var C = (r*r - x*x) / (2*x)\n                    var angle = Math.atan2(r, C)\n                    g.arc(node_pos[0], node_pos[1] + C, C + x, tau*3/4 - angle, tau*3/4 + angle)\n                } else if (percent_done > 0.5) {\n                    var x = lerp(0.5, 0, 1, r, percent_done)\n                    var C = (r*r - x*x) / (2*x)\n                    var angle = Math.atan2(r, C)\n                    g.arc(node_pos[0], node_pos[1] - C, C + x, tau/4 - angle, tau/4 + angle)\n                } else {\n                    g.arc(node_pos[0], node_pos[1] + C, C + x, 0, tau)\n                }\n                g.fillStyle = 'lightblue'\n                g.fill()\n            }\n        }\n        \n        g.beginPath()\n        g.arc(node_pos[0], node_pos[1], r, 0, tau)\n        if (fully_acked[e[0]]) {\n            g.fillStyle = 'blue'\n            g.fill()\n        } else {\n            g.strokeStyle = 'blue'\n            g.stroke()\n        }\n        \n        draw_text(c, g, 
e[0].slice(0, 3), node_pos[0] + r, node_pos[1] + r, 'grey', 'left', 'top')\n    })\n    \n    Object.keys(res.unack_boundary).forEach(v => {\n        g.beginPath()\n        g.fillStyle = 'white'\n        var node_pos = get_node_pos(vs[v])\n        g.arc(node_pos[0], node_pos[1], r * 0.5, 0, Math.PI*2)\n        g.fill()\n    })\n    \n    Object.values(res.fissures).forEach(f => {\n        Object.keys(f.versions).forEach(v => {\n            if (!res.time_dag[v]) return\n            g.beginPath()\n            \n            var rand = Math.create_rand(f.conn)\n            g.strokeStyle = '#' + rand().toString(16).slice(2, 8)\n            \n            var node_pos = get_node_pos(vs[v])\n            //var rr = r * 1.45\n            var rr = r * (1 + rand())\n            \n            g.lineWidth = 2\n            if (f.a < f.b) {\n                \n\n\n                // work here\n                g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4)\n                \n                \n                \n                // g.moveTo(node_pos[0] - rr, node_pos[1] - rr)\n                // g.lineTo(node_pos[0] + rr, node_pos[1] - rr)\n                // g.lineTo(node_pos[0] + rr, node_pos[1] + rr)\n                // g.lineTo(node_pos[0] - rr, node_pos[1] + rr)\n            } else {\n                \n                g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4, true)\n                \n                \n                // var rrr = Math.sqrt(2) * rr\n                // g.moveTo(node_pos[0] - rrr, node_pos[1])\n                // g.lineTo(node_pos[0], node_pos[1] - rrr)\n                // g.lineTo(node_pos[0] + rrr, node_pos[1])\n                // g.lineTo(node_pos[0], node_pos[1] + rrr)\n                // g.closePath()\n            }\n            g.stroke()\n        })\n    })\n    \n}\n\nfunction draw_space_dag(c, g, S, x, y) {\n    function helper(node, y, px, py) {\n        g.beginPath()\n        g.moveTo(x, y)\n        g.lineTo(px, py)\n        g.lineWidth = 
1\n        g.strokeStyle = 'lightblue'\n        g.stroke()\n\n        var begin_x\n        var end_x\n\n        draw_text(c, g, node.version ? node.version.slice(0, 3) : '_', x, y + 25, 'grey', 'left', 'middle')\n        \n        var my_text = node.elems.slice(0, 3) + (node.elems.length > 3 ? '..' : '') + (node.end_cap ? '*' : '')\n        \n        draw_text(c, g, my_text, x, y, Object.keys(node.deleted_by).length > 0 ? 'red' : 'blue', 'left', 'middle', '20px Arial')\n        \n        var width = g.measureText(my_text).width\n        x += width\n\n        var px = x\n        x += 10\n        for (var n of node.nexts) helper(n, y + 40, px, y)\n        if (node.next) helper(node.next, y, px, y)\n    }\n    if (typeof(S) == 'string') helper(sync9_create_space_dag_node('lit', S))\n    else helper(S, y, x, y)\n}\n\nfunction make_html(html) {\n    var d = document.createElement('div')\n    d.innerHTML = html\n    return d.firstChild\n}\n\nfunction lerp(t0, v0, t1, v1, t) { return (t - t0) * (v1 - v0) / (t1 - t0) + v0 }\nfunction add(a, b) { return a.map((x, i) => x + b[i]) }\nfunction sub(a, b) { return a.map((x, i) => x - b[i]) }\nfunction perp(a) { return [-a[1], a[0]] }\nfunction lenSq(a) { return a.reduce((a, b) => a + b*b, 0) }\nfunction len(a) { return Math.sqrt(lenSq(a)) }\nfunction mul(a, b) { return a.map(x => x * b) }\nfunction norm(a) { return mul(a, 1/len(a)) }\n\n</script>\n"
  },
  {
    "path": "kernel/demos/wiki/wiki-server.js",
    "content": "const port = 3007;\n\nrequire('../../../util/braid-bundler.js')\nvar fs = require('fs')\nvar bundle = fs.readFileSync('../../../builds/braid-bundle.js')\nvar wiki_client = fs.readFileSync('wiki-client.html')\nvar cb = (req, res) => {\n    res.writeHead(200)\n    res.end(req.url == '/braid-bundle.js' ? bundle : wiki_client)\n}\n\nvar server = (fs.existsSync('certs/private-key') && fs.existsSync('certs/certificate')) ?\n    require('https').createServer({\n        key: fs.readFileSync('certs/private-key'),\n        cert: fs.readFileSync('certs/certificate')\n    }, cb) :\n    require('http').createServer(cb)\nserver.listen(port)\nvar wss = new (require('ws').Server)({server})\n\nvar node = require('../../node.js')()\nvar store = require('../../sqlite-store.js')('db.sqlite')\nrequire('../../store.js')(node, store).then(node => {\n    node.fissure_lifetime = 1000*60*60*8 // 8 hours\n\n    node.on_errors.push((key, origin) => node.unbind(key, origin))\n\n    var ws = require('../../websocket-server.js')(node, {wss})\n\n    console.log('keys at startup: ' + JSON.stringify(Object.keys(node.resources)))\n    console.log('serving on port: ' + port)\n})"
  },
  {
    "path": "kernel/errors.js",
    "content": "function report (method, error) {\n    if (show_protocol_errors)\n        console.log('PROTOCOL ERROR for ' + method + ': ' + error)\n    throw error\n}\n\nmodule.exports = require.errors = (node) => ({\n    \n    get (args) {\n        var {key, subscribe, version, parents, origin} = args\n        //var key = args.key, subscribe = args.subscribe, parents = args.parents\n        if (!key || typeof(key) !== 'string')\n            report('get', 'invalid key' + JSON.stringify(key))\n\n        log('get:', node.pid, key)\n\n        var resource = node.resource_at(key)\n        if (subscribe && subscribe.keep_alive\n            && resource.keepalive_peers[origin.id])\n            report('get', 'we already welcomed them')\n\n        if (version && typeof(version) != 'string')\n            report('get', 'invalid version: ' + JSON.stringify(version))\n\n        if (parents && (typeof(parents) != 'object'\n                        || Object.entries(parents).some(([k, v]) => v !== true)))\n            report('get', 'invalid parents: ' + JSON.stringify(parents))\n    },\n\n    set (args) {\n        var {key, version, parents, patches, origin} = args\n\n        if (!key || typeof(key) !== 'string')\n            throw report('set', 'invalid key: ' + JSON.stringify(key))\n\n        var resource = node.resource_at(key)\n\n        // If you're trying to join a persistent consistent group, then\n        // you probably don't want to send any SETs before you actually\n        // join and know what the current version is:\n        if (origin && u.has_keep_alive(origin, key)\n            && !resource.keepalive_peers[origin.id])\n            report('set', 'we did not welcome them yet')\n\n        if (!patches || !Array.isArray(patches)\n            || patches.some(x => typeof(x) != 'string'))\n            report('set', 'invalid patches: ' + JSON.stringify(patches))\n\n        if (!version || typeof(version) != 'string')\n            report('set', 'invalid version: ' + 
JSON.stringify(version))\n\n        if (parents && (typeof(parents) != 'object'\n                        || Object.entries(parents).some(([k, v]) => v !== true)))\n            report('set', 'invalid parents: ' + JSON.stringify(parents))\n    },\n\n    welcome (args) {\n        var {key, versions, fissures, unack_boundary, min_leaves, parents, origin} = args\n        // Sanity-check the input\n        {\n            if (!key || typeof(key) != 'string')\n                report('welcome', 'invalid key: ' + JSON.stringify(key))\n\n            var resource = node.resource_at(key)\n            if (!resource.keepalive_peers[origin.id])\n                report('welcome', 'we did not welcome them yet')\n\n            if (!Array.isArray(versions) || !versions.every(v => {\n                if (v.version && typeof(v.version) != 'string') return false\n                if (!v.parents || typeof(v.parents) != 'object'\n                    || Object.entries(v.parents).some(([k, v]) => v !== true)) return false\n                if (!Array.isArray(v.patches)\n                    || v.patches.some(x => typeof(x) != 'string')) return false\n                if (v.hint) {\n                    if (!v.hint.sort_keys) return false\n                    if (typeof(v.hint.sort_keys) != 'object') return false\n                    if (!Object.entries(v.hint.sort_keys).every(([index, key]) => (''+index).match(/^\\d+$/) && typeof(key) == 'string')) return false\n                }\n                return true\n            })) {\n                report('welcome', 'invalid versions: ' + JSON.stringify(versions))\n            }\n\n            if (!Array.isArray(fissures) || !fissures.every(fissure => {\n                if (!fissure || typeof(fissure) != 'object') return false\n                if (typeof(fissure.a) != 'string') return false\n                if (typeof(fissure.b) != 'string') return false\n                if (typeof(fissure.conn) != 'string') return false\n                if 
(!fissure.versions || typeof(fissure.versions) != 'object'\n                    || !Object.entries(fissure.versions).every(([k, v]) => v === true)) return false\n                if (!fissure.parents || typeof(fissure.parents) != 'object'\n                    || !Object.entries(fissure.parents).every(([k, v]) => v === true)) return false\n                if (typeof(fissure.time) != 'number') return false\n                return true\n            })) {\n                report('welcome', 'invalid fissures: ' + JSON.stringify(fissures))\n            }\n\n            if (unack_boundary && (typeof(unack_boundary) != 'object'\n                                   || !Object.entries(unack_boundary).every(\n                                       ([k, v]) => v === true)))\n                report('welcome', 'invalid unack_boundary: '+JSON.stringify(unack_boundary))\n\n            if (min_leaves && (typeof(min_leaves) != 'object'\n                               || !Object.entries(min_leaves).every(\n                                   ([k, v]) => v === true)))\n                report('welcome', 'invalid min_leaves: ' + JSON.stringify(min_leaves))\n            \n            if (parents && (typeof(parents) != 'object'\n                               || !Object.entries(parents).every(\n                                   ([k, v]) => v === true)))\n                report('welcome', 'invalid parents: ' + JSON.stringify(parents))\n        }\n    },\n\n    forget (args) {\n        if (!key || typeof(key) != 'string')\n            report('forget', 'invalid key: ' + JSON.stringify(key))\n        if (!node.incoming_subscriptions.has(key, origin.id))\n            report('forget', `pipe \"${origin.id}\" did not get the key \"${key}\" yet`)\n    },\n\n    ack (args) {\n        var {key, valid, seen, version, origin} = args\n\n        // guard against invalid messages\n        if (typeof(key) !== 'string')\n            report('ack', 'invalid key: ' + JSON.stringify(key))\n\n        var resource 
= node.resource_at(key)\n        if (!resource.keepalive_peers[origin.id])\n            report('ack', 'we did not welcome them yet')\n\n        if (typeof(valid) !== 'undefined')\n            report('ack', 'support for valid flag not yet implemented')\n\n        if (seen !== 'local' && seen !== 'global')\n            report('ack', 'invalid seen: ' + JSON.stringify(seen))\n\n        if (typeof(version) !== 'string')\n            report('ack', 'invalid version: ' + JSON.stringify(version))\n    },\n\n    fissure ({key, fissure, origin}) {\n        if (typeof(key) !== 'string')\n            return report('fissure', 'invalid key: ' + JSON.stringify(key))\n\n        var resource = node.resource_at(key)\n\n        if ((!fissure          || typeof(fissure)          !== 'object') ||\n            (!fissure.a        || typeof(fissure.a)        !== 'string') ||\n            (!fissure.b        || typeof(fissure.b)        !== 'string') ||\n            (!fissure.conn     || typeof(fissure.conn)     !== 'string') ||\n            (!fissure.versions || typeof(fissure.versions) !== 'object'\n             || !Object.entries(fissure.versions).every(([k, v]) => v === true)) ||\n            (!fissure.parents || typeof(fissure.parents) !== 'object'\n             || !Object.entries(fissure.parents).every(([k, v]) => v === true)) ||\n            (typeof(fissure.time) !== 'number'))\n        {\n            report('fissure', 'invalid fissure: ' + JSON.stringify(fissure))\n        }\n    }\n})"
  },
  {
    "path": "kernel/http-client.js",
    "content": "// This file is still being used with the sync9-chat demo, but Mike will\n// refactor it soon.\n\nvar u = require('utilities.js');\n\n// Binds a node to a url, allowing the node to send GETS and SETS to that url\nmodule.exports = require['http-client-old'] = function add_http_client({node, url, prefix}) {\n    url = url       || 'https://localhost:80/'\n    prefix = prefix || '/*'\n    var enabled = true;\n    const controller = new AbortController();\n    // Make a fake pipe object\n    // The real ones check acks and synchronization and such\n    let pipe = {\n        id: u.random_id(), \n        send: send,\n        recv: function(args) {\n            if (args.method != \"ping\" && args.method != \"pong\") {\n                nlogf('H1', 'remote', '=->', 'local', args);\n            }\n            args.origin = pipe;\n            node[args.method](args);\n        },\n        //connection: \"http\"\n    };\n\n    node.bind(prefix, pipe)\n    \n    function send(args) {\n        if (args.method === 'get')\n            send_get(args)\n\n        else if (args.method === 'set')\n            send_set(args)\n\n        let symbol = (args.method === 'get' || args.method === 'set') ? '-=>' : '-|>';\n        if (args.method === 'error')\n            symbol = '-!>'\n        if (args.method != \"ping\" && args.method != \"pong\") {\n            nlogf('H1', 'local ', symbol, 'remote', args);\n        }\n    }\n    // Read sets from a persistent stream\n    function sets_from_stream(stream, callback, finished) {\n        // Set up a reader\n        let reader = stream.getReader()\n        let decoder = new TextDecoder('utf-8')\n        let buffer = '';\n        let headers = false;\n        let patches = [];\n        reader.read().then(function read ({value, done}) {\n            if (done) {\n                // subscription was closed\n                if (buffer.trim().length)\n                    console.debug(\"Connection was closed. 
Remaining data in buffer:\", buffer);\n                else\n                    console.debug(\"Connection was closed. Buffer was empty.\")\n                finished();\n                return;\n            }\n            const chunkStr = value ? decoder.decode(value) : \"\";\n            // Remove newlines at the beginning, maybe unnecessary\n            buffer = (buffer + chunkStr).trimStart();\n            if (value)\n                console.debug(`Got a chunk of length ${chunkStr.length}. Current buffer:`);\n            else\n                // If there's no new chunk then we must have had some data left over after a successful parse\n                console.debug(\"Reading on unchanged buffer:\")\n            console.debug(buffer);\n            // If we haven't parsed headers yet, try to parse headers.\n            if (!headers) {\n                console.debug(\"Trying to parse headers...\")\n                const parsedH = parse_headers();\n                if (parsedH) {\n                    headers = parsedH.headers;\n                    // Take the parsed headers out of the buffer\n                    buffer = buffer.substring(parsedH.consumeLength);\n                    console.debug(\"Success. Headers:\", headers)\n                } else {\n                    console.debug(\"Failed to parse headers. We probably don't have enough.\")\n                }\n            }\n            if (headers)\n                console.debug(\"Trying to parse patches...\")\n            // Try to parse patches. parse_patches returns boolean\n            if (headers && parse_patches()) {\n                console.debug(\"Success. Patches:\", patches)\n                // We have a complete message ... \n                let msg = {\n                    version: headers.version ? JSON.parse(headers.version) : null,\n                    patches: patches ? patches.slice() : null,\n                    parents: headers.parents ? 
{} : null\n                };\n                if (headers.parents)\n                    headers.parents.split(\", \").forEach(x => msg.parents[JSON.parse(x)] = true)\n                console.debug(\"Assembled complete message: \", msg);\n                setTimeout(callback, 0, msg);\n                headers = false;\n                patches = [];\n                // We've gotten a SET, but actually there might be more still in the buffer.\n                // We have to keep reading messages until we fail, and only then can we look for the next chunk.\n                console.debug(\"Restarting in current buffer...\")\n                return read({value: false, done: false});\n            } else {\n                if (headers)\n                    console.debug(\"Couldn't parse patches. We probably don't have enough.\")\n                console.debug(\"Waiting for next chunk to continue reading\")\n                return reader.read().then(read).catch(console.error);\n            }\n            \n        }).catch(console.error);\n        function parse_headers() {\n            // This string could contain a whole response.\n            // So first let's isolate to just the headers.\n            const end_of_headers = buffer.indexOf('\\n\\n');\n            if (end_of_headers == -1)\n                return false;\n            const stuff_to_parse = buffer.substring(0, end_of_headers)\n    \n            // Now let's grab everything from these headers\n            var headers = {},\n                regex = /([\\w-]+): (.*)/g,\n                temp\n            while (temp = regex.exec(stuff_to_parse))\n                headers[temp[1].toLowerCase()] = temp[2]\n            // TODO: Parse key-value pair headers and list headers.\n            return {headers: headers, consumeLength: end_of_headers + 2}\n        }\n        function parse_patches() {\n            if (headers['content-length']) {\n                console.debug(\"Got an absolute body\")\n                // 
This message has \"body\"\n                const length = headers['content-length'];\n                if (h.length + length < buffer.length)\n                    return false;\n                // ... \n                // This behavior is not in the initial http1 spec, so we don't have to worry about it\n            }\n            if (headers.patches) {\n                // Parse patches until we run out of patches to parse or get all of them\n                while (patches.length < headers.patches) {\n                    buffer = buffer.trimStart();\n                    const parsePatchHeaders = parse_headers();\n                    if (!parsePatchHeaders) {\n                        console.debug(\"Failed to parse patch headers!\")\n                        return false;\n                    }\n                    const patchHeaders = parsePatchHeaders.headers;\n                    const headerLength = parsePatchHeaders.consumeLength;\n                    // assume we have content-length...\n                    const length = parseInt(patchHeaders['content-length']);\n                    // Does our current buffer contain enough data that we have the entire patch?\n                    if (buffer.length < headerLength + length) {\n                        console.debug(\"Buffer is too small to contain the rest of the patch...\")\n                        return false;\n                    }\n                    // Assume that content-range is of the form 'json .index'\n                    const r = patchHeaders['content-range']\n                    const patchRange = r.startsWith(\"json \") ? r.substring(5) : r;\n                    const patchValue = buffer.substring(headerLength, headerLength + length);\n                    // We've got our patch!\n                    patches.push(`${patchRange} = ${patchValue}`);\n                    buffer = buffer.substring(headerLength + length);\n                    console.debug(`Successfully parsed a patch. 
We now have ${patches.length}/${headers.patches}`);\n                }\n                console.debug(\"Parsed all patches.\")\n                return true;\n            }\n        }\n    }\n    function send_get (msg) {\n        var h = {\"x-client-id\": node.pid};\n        if (msg.version) h.version = JSON.stringify(msg.version)\n        if (msg.parents) h.parents = Object.keys(msg.parents).map(JSON.stringify).join(', ')\n        \n        if (msg.subscribe) {\n            if (msg.subscribe.keep_alive)\n                msg.subscribe.keep_alive = false;\n            h.subscribe = Object.entries(msg.subscribe)\n                .map(a => `${a[0].replace(\"_\", \"-\")}=${a[1]}`)\n                .join(\";\");\n        }\n        \n        const sendUrl = new URL(msg.key, url);\n        function trySend(waitTime) {\n            console.log(`Fetching ${sendUrl}`);\n            fetch(sendUrl, {method: 'GET',\n                            mode: 'cors',\n                            headers: new Headers(h),\n                            signal: controller.signal})\n                .then(function (res) {\n                    if (!res.ok) {\n                        console.error(\"Fetch failed!\", res)\n                        return\n                    }\n                    sets_from_stream(res.body, \n                        callback = setMessage => {\n                            // Insert the method and key into this\n                            setMessage.method = \"set\";\n                            setMessage.key = msg.key;\n                            // TODO: Don't have to do this.\n                            node.resource_at(msg.key).weve_been_welcomed = true;\n                            pipe.recv(setMessage);\n                        },\n                        finished = () => {\n                            // Maybe close the fetch?? 
idk\n                            console.warn(`Subscription to ${msg.key} ended by remote host`);\n                        }\n                    );\n                })\n                .catch(function (err) {\n                    console.error(\"Fetch GET failed: \", err)\n                    // Exponential backoff\n                    setTimeout(() => trySend(Math.min(waitTime * 5, 100000)), waitTime)\n                })\n        }\n        trySend(100);\n        \n    }\n    function send_set (msg) {\n        var h = {\n            'content-type': 'application/json',\n            'merge-type': 'sync9',\n            \"x-client-id\": node.pid\n        }\n        if (msg.version) h.version = JSON.stringify(msg.version)\n        if (msg.parents) h.parents = Object.keys(msg.parents).map(JSON.stringify).join(', ')\n        if (msg.subscribe) h.subscribe = Object.entries(msg.subscribe)\n            .map(a => `${a[0].replace(\"_\", \"-\")}=${a[1]}`)\n            .join(\";\");\n\n        let body = msg.patch;\n        if (msg.patches) {\n            // Write patches as pseudoheaders\n            body = msg.patches.map(patch => {\n                // We should use the sync9 patch parser\n                const split = patch.match(/(.*?)\\s*=\\s*(.*)/); // (...) 
= (...)\n                const length = `content-length: ${split[2].length}`;\n                const range = `content-range: json ${split[1]}`;\n                return `${length}\\n${range}\\n\\n${split[2]}\\n`\n            }).join(\"\\n\");\n            h.patches = msg.patches.length;\n        }\n        const sendUrl = new URL(msg.key, url);\n        function trySend(waitTime) {\n            fetch(sendUrl, {method: 'PUT',\n                            body: body,\n                            mode: 'cors',\n                            headers: new Headers(h)})\n                .then(function (res) {\n                    res.text().then(function (text) {\n                        console.debug(`Received SET response: status ${res.status}, body \"${text}\"`)\n                    })\n                })\n                .catch(function (err) {\n                    console.error(\"Fetch SET failed: \", err);\n                    // Exponential backoff\n                    setTimeout(() => trySend(Math.min(waitTime * 5, 100000)), waitTime)\n                });\n        }\n        trySend(20);\n    }\n    \n    return {\n        pipe,\n        enabled() {return enabled},\n        enable()  {nlog('ENABLING PIPE', pipe.id); enabled = true; }, // connect()\n        disable() {nlog('DISABLING PIPE',pipe.id); enabled = false; controller.abort()}, // disconnect()\n        toggle()  {if (enabled) {disable()} else enable()}\n    }\n}"
  },
  {
    "path": "kernel/http-server.js",
    "content": "// This file is still being used with the sync9-chat demo, but Mike will\n// refactor it soon.\n\n// Example braid-peer as a web server\n//const fs = require('fs');\nconst assert = require('assert');\n//const pipe = require('../pipe.js');\nconst parseHeaders = require('parse-headers');\nvar u = require('../util/utilities.js');\n\nmodule.exports = function add_http_server(node) {\n    // Write an array of patches into the pseudoheader format.\n    const openPipes = {};\n    function writePatches(patches) {\n        // This will return something like:\n        // Patches: n\n        // \n        // content-length: 14 // patch #1\n        // content-range: json .range (or) json [indices]\n        //\n        // [\"json object\"]\n        //\n        // content-length: x // patch #2\n        // ...\n        let out = `patches: ${patches.length}\\n`\n        for (let patch of patches) {\n            out += \"\\n\"\n            // This should be rewritten to use sync9's patch parser.\n            const split = patch.match(/(.*?)\\s*=\\s*(.*)/);\n            assert(split.length == 3)\n            const range = split[1];\n            const change = split[2];\n            out += `content-length: ${change.length}\\n`;\n            out += `content-range: json ${range}\\n`;\n            out += \"\\n\";\n            out += `${change}\\n`;\n        }\n        return out;\n    }\n    // This function reads n patches in pseudoheader format from a ReadableStream\n    //   and then fires a callback when they're finished\n    // Might be nice to use a promise here\n    function readPatches(n, stream, cb) {\n        let patches = [];\n        let curPatch = \"\";\n        if (n == 0)\n            return cb(patches);\n        stream.on('data', function parse (chunk) {\n            // Otherwise we'll have extra newline at the start. 
I'm not sure if this would mess up parse-headers.\n            curPatch = (curPatch + chunk).trimStart();\n            // Find out if we have an entire patch.\n            // This means: first, we look for a double newline.\n            const headerLength = curPatch.indexOf(\"\\n\\n\");\n            if (headerLength == -1) return;\n            // Now that we have all the headers, we have to parse them and look for content-length\n            // TODO: Support Transfer-Encoding: Chunked (maybe?)\n            const headers = parseHeaders(curPatch.substring(0, headerLength));\n            assert(headers['content-length']);\n            const length = parseInt(headers['content-length']);\n            // Does our current buffer contain enough data that we have the entire patch?\n            if (curPatch.length < headerLength + 2 + length) return;\n            // Assume that content-range is of the form 'json .index'\n            const patchRange = headers['content-range'].startsWith(\"json \") ?\n                headers['content-range'].substring(5) :\n                headers['content-range'];\n            const patchValue = curPatch.substring(headerLength + 2, headerLength + 2 + length);\n            // We've got our patch!\n            patches.push(`${patchRange} = ${patchValue}`);\n            curPatch = curPatch.substring(headerLength + 2 + length);\n            if (patches.length == n) {\n                stream.pause();\n                cb(patches);\n            } else \n                // Try parsing for another message in the current buffer\n                parse(\"\");\n        });\n        stream.on('end', () => {\n            // If the stream ends before we get everything, then return what we did receive\n            if (patches.length != n) {\n                console.warn(`Got an incomplete PUT: ${patches.length}/${n} patches were received`);\n                cb(patches);\n            }\n        })\n    }\n    // Construct a (fake) pipe object that allows 
writing data into a persistent stream\n    function responsePipe(res, id) {\n        // Construct pipe\n        const pipe = {\n            id: id,\n            send: sendVersions,\n            disconnect: disconnect\n            //connection: \"http\", // These are supposed to be unique ids of some sort :)\n        };\n\n        const allowedMethods = [\"set\", \"welcome\"]\n        // The node will call this method with JSON messages\n        function sendVersions (args) {\n            let symbol = allowedMethods.includes(args.method) ? '-=>' : '-|>';\n            if (args.method === 'error')\n                symbol = '-!>'\n            if (args.method != \"ping\" && args.method != \"pong\") {\n                nlogf('H1', 'server', symbol, id.slice(0,6).padEnd(6), args);\n            }\n            // The protocol doesn't support things like acks and fissures\n            if (!allowedMethods.includes(args.method)) {\n                return;\n            }\n            // Extract the three relevant fields from JSON message\n            let versions = [];\n            if (args.method == \"welcome\") {\n                versions = args.versions.map(f => ({\n                    version: f.version,\n                    parents: f.parents,\n                    patches: f.patches\n                }))\n            } else if (args.method == \"set\") {\n                versions = [{\n                    version: args.version,\n                    parents: args.parents,\n                    patches: args.patches\n                }]\n            }\n            for (let version of versions) {\n                if (version.version)\n                    res.write(`Version: ${JSON.stringify(version.version)}\\n`)\n                if (Object.keys(version.parents).length)\n                    res.write(`Parents: ${Object.keys(version.parents).map(JSON.stringify).join(\", \")}\\n`)\n                \n                res.write(\"Merge-Type: sync9\\n\")\n                
res.write(\"Content-Type: application/json\\n\")\n                res.write(writePatches(version.patches)) // adds its own newline\n                res.write(\"\\n\")\n            }\n        }\n        function disconnect () {res.end(); }\n        return pipe;\n    }\n    // The entry point of the server.\n    // Listen for requests\n    function handleHttpResponse(req, res) {\n        // Apply hardcoded access control headers\n        // The cors() method will return true if the request is an OPTIONS request\n        // (It'll also respond 200 and end the stream)\n        if (cors(req, res))\n            return;\n        // There should be a better way to do this.\n        // Initially, this would take a message, create a pipe, and recv the message\n        // But it turns out that in many cases you actually want to set some data on the node\n        //   before it receives the message but after the pipe is created\n        const create_pipe = (id) => {\n            if (openPipes[id]) {\n                console.error(\"ClientID collision!\");\n                return;\n            }\n            let pipe = responsePipe(res, id);\n            openPipes[id] = {key: req.url, origin: pipe};\n            res.on('close', () => {\n                console.log(`Connection closed on ${req.url}`);\n                assert(openPipes[id]);\n                node.forget(openPipes[id]);\n                delete openPipes[id];\n            });\n        };\n        const recv = (id, msg) => {\n            if (msg.method != \"ping\" && msg.method != \"pong\") {\n                nlogf('H1', id.slice(0,6).padEnd(6), '=->', 'server', msg);\n            }\n            if (openPipes[id])\n                msg.origin = openPipes[id].origin;\n            return node[msg.method](msg);\n        }\n        // Copy headers that have the same value in HTTP as Braid\n        let msg = {\n            key: req.url\n        }\n        // Copy headers that need minor modifications but no additional 
conditionals\n        if (req.headers.version)\n            msg.version = JSON.parse(req.headers.version)\n        if (req.headers.parents) {\n            msg.parents = {};\n            req.headers.parents.split(\", \").forEach(x => msg.parents[JSON.parse(x)] = true)\n        }\n        // If we end up having more methods supported, maybe make this a switch\n        if (req.method == \"GET\") {\n            res.setHeader('cache-control', 'no-cache, no-transform');\n            \n            if (!req.headers.hasOwnProperty(\"subscribe\")) {\n                // Respond over plain http\n                res.setHeader('content-type', 'text/json');\n                res.statusCode = 200;\n                // If the origin is just an id, then there will be no callback or subscription.\n                msg.origin = {id: 'null-pipe'};\n                // And the node will just return the value of the resource at the specified version \n                res.end(JSON.stringify(node.get(msg)));\n                return;\n            }\n            // Set some headers needed to indicate a subscription.\n            res.statusCode = 209;\n            res.setHeader(\"subscribe\", req.headers.subscribe)\n            res.setHeader('content-type', 'text/braid-patches');\n            // res.setHeader('connection', 'Keep-Alive');\n            // Parse the subscribe header. 
Options are:\n            // keep-alive=true    # this can actually be specified as just keep-alive, but we can support that later\n            // keep-alive=false\n            // keep-alive=number\n            let subStr = req.headers.subscribe.match(/keep-alive=(\\w+)/)[1];\n            let sub = false;\n            if (subStr == \"true\")\n                sub = true;\n            else if (subStr != \"false\") // It's a number\n                sub = parseInt(subStr);\n            msg.subscribe = {\"keep-alive\": sub};\n            msg.method = \"get\"\n\n            // Receive the request\n            const clientID = `${req.headers['x-client-id'] || u.random_id()}=>${msg.key}`;\n            create_pipe(clientID);\n            // recv will call get(), which will return the value of the resource if successful and undefined otherwise\n            let result = recv(clientID, msg);\n            if (result == undefined)\n                res.end(500);\n        }\n        else if (req.method == \"PUT\") {\n            // We only support these headers right now...\n            assert(req.headers[\"content-type\"] == \"application/json\")\n            assert(req.headers[\"merge-type\"] == \"sync9\")\n            let status = 200;\n            if (!node.resources[msg.key])\n                // If we don't have the resource, it'll be created.\n                // We actually need to add a way to prevent clients from creating braid resources with the same names\n                //   as file resources, which would make them unreadable.\n                // I think we should instead make the server explicitly bind itself to some paths.\n                status = 201;\n            res.statusCode = status;\n            msg.method = \"set\"\n            // Parse patches\n            // Try to read patches from the request body\n            // req.headers.patches is the number of patches expected\n            readPatches(req.headers.patches, req, (patches) => {\n                // 
When finished, create a pipe.\n                msg.patches = patches;\n                res.setHeader(\"patches\", \"OK\");\n                const clientID = `${req.headers['x-client-id'] || u.random_id()}=>${msg.key}`;\n                // recv will call node.set, which will return `version` if successful and undefined otherwise\n                // TODO: Maybe return an error code from get/set?\n                let result = recv(clientID, msg);\n                if (result == undefined)\n                    res.statusCode = 500;\n                res.end();\n            })\n        }\n    }\n    function cors(req, res) {\n        const free_the_cors = {\n            \"Access-Control-Allow-Origin\": \"*\"\n            ,\"Access-Control-Allow-Methods\": \"OPTIONS, HEAD, GET, PUT\"\n            ,\"Access-Control-Allow-Headers\": \"subscribe, x-client-id, version, parents, merge-type, content-type\"\n        };\n        Object.entries(free_the_cors).forEach(x => res.setHeader(x[0], x[1]));\n        if ( req.method === 'OPTIONS' ) {\n            res.writeHead(200);\n            res.end();\n            return true;\n        }\n        return false;\n    }\n    process.on('SIGINT', function() {\n        if (Object.keys(openPipes).length) {\n            console.log(\"\\nForgetting H1 connections:\");\n            Object.values(openPipes).forEach(sub => {\n                console.log(`    pipe ${sub.origin.id} on resource${sub.key}`);\n                node.forget(sub);\n                sub.origin.disconnect();\n            });\n            console.log(\"Closing process\");\n        }   \n        process.exit();\n    });\n    return handleHttpResponse;\n}"
  },
  {
    "path": "kernel/leadertab-shell.js",
    "content": "var util = require('utilities.js');\nvar store = require('store.js');\n\nconst states = {\n    // Don't process incoming commands and don't send outgoing ones\n    DISABLED: 0,\n    // The leader exists and it is not us.\n    // We should send any activity to the leader.\n    CLIENT: 1, \n    // There is no leader.\n    // We should try to become the leader, and save anything we get until then.\n    // We should also broadcast anything we do before then.\n    ELECTING: 2, \n    // We have become the leader, but we aren't ready to send things to the server yet.\n    // We should get the connection ready, and save anything we get until then.\n    ELECTED: 3,\n    // We are the leader.\n    // We should apply incoming commands and broadcast new state.\n    LEADER: 4\n};\nconst signal_types = {\n    // The leader has submitted their letter of resignation.\n    // The leader is not going to handle events during the election.\n    // This means we have to cache incoming events.\n    LEADER_UNLOADING: \"leader-unloading\",\n    // The election is starting.\n    START_ELECTION: \"start-election\",\n    // Any client can send a PING to the leader\n    PING: \"ping\",\n    // Only the leader responds to a ping, and it responds with a pong.\n    PONG: \"pong\",\n    // A command sent by a client to the leader.\n    COMMAND: \"command\",\n    // The leader has received new state from the remote peer.\n    STATE: \"state\"\n};\nconst channel_name = \"braid-leadertab\";\n// We can basically make this as low as we want.\n// Since the leader tab has a websocket open (if alive), it can instantly respond to our ping\n// and it doesn't use timers.\nconst ping_timeout = 200;\n\nconst db_name = \"braid-db\";\n// This table the state of the braid\nconst db_network_store = \"braid-network\";\n// This table is just a mutex\nconst db_election_store = \"election\";\n// Every subscribed key gets a localstorage entry of the form prefix_key\n// This var is the prefix used (with 
the separator attached)\nconst ls_sub_prefix = \"braidsub\" + \"_\";\n\nmodule.exports = require[\"leadertab-shell\"] = function(url) {\n    // Our leaderId, probably not actually needed.\n    const id = util.random_id();\n    // Timeout handle for leader activity\n    let leader_alive_id;\n    // The channel over which we will broadcast state and commands\n    const channel = new BroadcastChannel(channel_name);\n    // Buffer for commands received during leader initialization\n    let command_queue = [];\n    // Until we're sure who the leader is, we want to buffer things.\n    let state = states.ELECTING;\n    // Try to open the DB\n    const dbPromise = idb.openDB(db_name, 4, { upgrade(db) {\n        if (!db.objectStoreNames.contains(db_network_store))\n            db.createObjectStore(db_network_store);\n        if (!db.objectStoreNames.contains(db_election_store))\n            db.createObjectStore(db_election_store);\n    }})\n    // Try to become the leader ASAP\n    dbPromise.then(becomeLeader());\n\n    // Stuff for the leader\n    let node;\n    let socket;\n    // The pipe.id for each registered subscription callback\n    // This is a local variable because when the connection is migrated, subscriptions\n    // will be recreated with new IDs.\n\n    // The pipe created in websocket-client.js is capable of managing subscriptions,\n    // but to use it we'd have to store the pipe in the db.\n    // TODO: Storing the pipe in the db might actually be good\n    let remote_get_handlers = {};\n    let local_defaults = {};\n\n    /** \n     * Route an incoming message to various handlers\n     */\n    channel.addEventListener('message', (event) => {\n        if (state === states.DISABLED)\n            return;\n        \n        switch (event.data.type) {\n            // Communication about braid objects\n            case signal_types.COMMAND:\n                if (state !== states.CLIENT)\n                    handleCommand(event.data);\n                break;\n    
        case signal_types.STATE:\n                recvState(event.data);\n                break;\n            // Leader-alive verification\n            case signal_types.PING:\n                if (state === states.LEADER || state === states.ELECTED)\n                    channel.postMessage({type: signal_types.PONG})\n                break;\n            case signal_types.PONG:\n                clearTimeout(leader_alive_id);\n                break;\n            // Leader changing\n            case signal_types.LEADER_UNLOADING:\n                state = states.ELECTING;\n                break;\n            case signal_types.START_ELECTION:\n                if (state === states.ELECTING)\n                    becomeLeader();\n                break;\n            default:\n                console.warn(\"Unknown signal type in message\", event.data);\n        }\n    })\n    /**\n     * When the leader tab is closed, it will inform other clients and start an election\n     */\n    async function startElection(local_eligible) {\n        channel.postMessage({type: signal_types.LEADER_UNLOADING});\n        if (local_eligible)\n            state = states.ELECTING;\n        // Unset the leader\n        const db = await dbPromise;\n        try {\n            await db.delete(db_election_store, \"leader\");\n        } catch (e) {\n            console.error(\"Failed to delete leader. 
\\nThis is most likely because someone else managed to do it first.\");\n            console.error(e);\n        }\n        // Start an election\n        channel.postMessage({type: signal_types.START_ELECTION});\n        if (local_eligible)\n            becomeLeader();\n    }\n    function resign() {\n        // If this tab is the leader, it should trigger an election\n        if (state === states.LEADER || state === states.ELECTED) {\n            // TODO: Is there a way to make sure the browser doesn't shut down the JS thread\n            // before we've had a chance to call for an election?\n            startElection();\n        }\n\n        // The only case in which we'll have a socket and not be the leader\n        // is if we were the leader and we were impeached for inactivity\n        if (socket)\n            socket.disable();\n        state = states.DISABLED;\n    }\n    /**\n     * Using the electionstore as a mutex, attempt to set ourselves as the leader.\n     * On success, prepare the leader responsibilities.\n     * On failure, make ourselves a client.\n     */\n    async function becomeLeader() {\n        console.log(\"Trying to become leader...\")\n        // Try to set ourselves as the leader\n        try {\n            const db = await dbPromise;\n            const tx = db.transaction(db_election_store, \"readwrite\");\n            // This promise will reject if leaderKey is already set.\n            await Promise.all([\n                tx.store.add(id, \"leader\"),\n                tx.done\n            ]);\n        } catch (e) {\n            // If we get a constrainterror or aborterror, that means the above promise rejected.\n            if (e.name !== 'ConstraintError' && e.name !== 'AbortError')\n                console.error(e);\n            // So we're a client.\n            state = states.CLIENT;\n            console.log(\"We're a client.\")\n            // We can also forget the command queue.\n            command_queue.length = 0;\n          
  // Finally, check the leader for activity.\n            pingLeader();\n            return;\n        }\n        console.log(\"We became the leader.\")\n        // If we get here, then we successfully added our id to the store, making us the leader.\n        state = states.ELECTED;\n        // Create a node\n        node = braidShell.node = require(\"braid.js\")();\n        // Fast forward the node using the db\n        await store(node, {\n            async get(key) {\n                return (await dbPromise).get(db_network_store, key);\n            },\n            async set(key, data) {\n                return (await dbPromise).put(db_network_store, data, key);\n            },\n            async del(key) {\n                return (await dbPromise).delete(db_network_store, key);\n            },\n            async list_keys() {\n                return (await dbPromise).getAllKeys(db_network_store);\n            }\n        });\n        Object.entries(local_defaults)\n            .map(([key, value]) => node.default(key, value))\n        \n        // Connect the node to the network\n        socket = require(url.startsWith(\"http\") ? 
'http-client.js' : 'websocket-client.js')({node, url});\n    socket.addEventListener(\"connect\", () => {\n            // Resend GETs that we might have lost while migrating\n            Object.keys(localStorage)\n            .filter(k => k.startsWith(ls_sub_prefix))\n            .forEach(storage_key => {\n                let braid_key = storage_key.substring(ls_sub_prefix.length);\n                // see https://stackoverflow.com/q/12862624\n                if ((+localStorage.getItem(storage_key)) > 0)\n                    remote_get_handlers[braid_key] = subscribe(braid_key)\n            })\n\n            // Now we're done, so we can start leading.\n            state = states.LEADER;\n            // Do anything that we might have queued up during the election.\n            while (command_queue.length)\n                handleCommand(command_queue.shift());\n        });\n        socket.enable();\n    }\n\n    /**\n     * Create a subscription to a remote key, and send the results over the broadcast channel.\n     */\n    function subscribe(key) {\n        if (remote_get_handlers.hasOwnProperty(key))\n            throw `Attempted double-subscription of ${key}`\n        function cb(val) {\n            // Whenever we get a new version of key\n            let outMessage = {type: signal_types.STATE, key, val};\n            // Send it to everyone else\n            channel.postMessage(outMessage);\n            // Receive it ourselves\n            recvState(outMessage);\n        };\n        node.get(key, cb);\n        return cb.pipe.id;\n    }\n    /**\n     * Apply commands sent over the broadcast channel to the node.\n     */\n    function handleCommand(command) {\n        // During the election, we don't know who will end up as the leader.\n        // If it could be us, we want to enqueue messages, and process or discard them later.\n        if (state === states.ELECTING || state === states.ELECTED) {\n            command_queue.push(command);\n            return;\n   
     }\n        // Have the node receive the command\n        switch (command.method) {\n            case \"get\": {\n                let ls_sub_key = ls_sub_prefix + command.key\n                // Localstorage returns null for unknown properties\n                // and +null == 0\n                let sub_count = +localStorage.getItem(ls_sub_key);\n                if (sub_count++ === 0)\n                    remote_get_handlers[command.key] = subscribe(command.key);\n                else \n                    channel.postMessage({\n                        type: signal_types.STATE,\n                        key: command.key,\n                        val: node.resource_at(command.key).mergeable.read()\n                    })\n                localStorage.setItem(ls_sub_key, sub_count);\n                break;\n            }\n            case \"set\":\n                node.setPatch(command.key, command.patch);\n                break;\n            case \"forget\": {\n                // This is going to look very similar to the \"get\" code\n                let ls_sub_key = ls_sub_prefix + command.key\n                let sub_count = +localStorage.getItem(ls_sub_key);\n                if (sub_count <= 0)\n                    throw `Can't unsub from ${command.key} because we aren't subscribed to it`\n                \n                let id = remote_get_handlers[command.key];\n                // If this was the last sub, send the forget upstream\n                if (--sub_count === 0) {\n                    node.forget(command.key, {pipe: {id}});\n                    delete remote_get_handlers[command.key];\n                }\n                localStorage.setItem(ls_sub_key, sub_count)\n                break;\n            }\n            default:\n                console.warn(\"Can't handle message\", command);\n        }\n    }\n\n    /**\n     * Send a command when requested by the local frontend.\n     */\n    function send(message) {\n        message.type = 
signal_types.COMMAND;\n        // Unless we're definitely the leader, broadcast stuff\n        if (state === states.CLIENT || state === states.ELECTING)\n            channel.postMessage(message);\n        if (state !== states.CLIENT)\n            handleCommand(message);\n    }\n    /**\n     * Inform the frontend of new state\n     */\n    function recvState(message) {\n        if (local_get_handlers.hasOwnProperty(message.key))\n            local_get_handlers[message.key].forEach(f => f(message.val));\n    }\n    /**\n     * Ping the leader to make sure it's alive\n     */\n    function pingLeader(time) {\n        if ((state !== states.CLIENT  && state !== states.ELECTING)\n            || document.visibilityState !== \"visible\")\n            return;\n        clearTimeout(leader_alive_id);\n        channel.postMessage({type: signal_types.PING});\n        // Start the election, and tell this tab that it's a candidate\n        leader_alive_id = setTimeout(() => startElection(true), time || ping_timeout);\n    }\n    document.addEventListener(\"visibilitychange\", () => pingLeader(), false);\n    // Bind the shell methods\n    let braidShell = {};\n    let local_get_handlers = {};\n    \n    braidShell.ping = pingLeader;\n    // It is the responsibility of the programmer to call close() before the page unloads!\n    braidShell.close = resign;\n    // Allow the frontend to get the state\n    braidShell.getState = () => state;\n\n    braidShell.get = (key, cb) => {\n        // TODO\n        if (!cb)\n            throw \"callback is required when using leadertab\"\n        cb.id = util.random_id();\n        // Add callback\n        if (local_get_handlers[key])\n            local_get_handlers[key].push(cb);\n        else {\n            local_get_handlers[key] = [cb];\n            send({method: \"get\", key: key})\n        }\n    };\n    braidShell.set = (key, value) => {\n        send({method: \"set\", key, patch: [`= ${JSON.stringify(value)}`]});\n    };\n    
braidShell.setPatch = (key, patch) => {\n        send({method: \"set\", key, patch});\n    };\n    braidShell.forget = (key, cb) => {\n        let index = local_get_handlers[key].findIndex(e => e.id === cb.id);\n        if (index == -1)\n            return;\n        local_get_handlers[key].splice(index, 1);\n        if (local_get_handlers[key].length == 0)\n            send({method: \"forget\", key});\n    };\n    braidShell.default = (key, val) => {\n        local_defaults[key] = val;\n        if ((state === states.LEADER || state === states.ELECTED) && node)\n            node.default(key, val);\n    }\n    return braidShell;\n};"
  },
  {
    "path": "kernel/llww.js",
    "content": "module.exports = require.llww = (resource) => {\n    resource.value = undefined\n\n    return {\n        add_version (version, parents, patches) {\n            patches.forEach(patch => apply_patch(patch, resource))\n        },\n\n        read (version) {\n            assert(!version)\n            return resource.value\n        },\n\n        generate_braid (versions) {\n            if (resource.value === undefined)\n                return []\n            assert(!versions || is_current_version(versions, resource))\n            return [{\n                patches: [` = ${JSON.stringify(resource.value)}`]\n            }]\n        }\n    }\n}\n\nvar is_current_version = (versions, resource) =>\n    Object.keys(versions).length === Object.keys(resource.current_version).length\n    && Object.keys(versions).every(v => resource.current_version[v] === true)\n\n\nvar parse_patch = require('../../../util/utilities.js').parse_patch\nfunction apply_patch (patch, resource) {\n    // Todo: Handle slices\n    var parse = parse_patch(patch)\n    console.log('applying', {parse, to: resource.value})\n    if (parse.path.length > 0) {\n        var target = resource.value\n        for (var i = 0; i < parse.path.length - 1; i++)\n            target = target[p]\n        target[parse.patch.length] = parse.value\n    }\n    else\n        resource.value = parse.value\n}\n"
  },
  {
    "path": "kernel/node.js",
    "content": "u = require('../util/utilities.js')\n\nmodule.exports = require.node = function create_node(node_data = {}) {\n    var node = {}\n    node.init = (node_data) => {\n        node.pid = node_data.pid || u.random_id()\n        node.resources = node_data.resources || {}\n        for (var key of Object.keys(node.resources)) {\n            node.resources[key] = create_resource(node.resources[key])\n        }\n        if (node_data.fissure_lifetime !== null)\n            node.fissure_lifetime = node_data.fissure_lifetime\n        if (node.fissure_lifetime === undefined)\n            node.fissure_lifetime = 1000 * 60 * 60 * 8  // Default to 8 hours\n\n        node.max_fissures = node_data.max_fissures\n\n        node.defaults = Object.assign(u.dict(), node.defaults || {})\n        node.default_patterns = node.default_patterns || []\n\n        node.ons = []\n        node.on_errors = []\n    \n        node.incoming_subscriptions = u.one_to_many()  // Maps `key' to `pipes' subscribed to our key\n\n        node.antimatter      = require('./antimatter')(node)\n        node.protocol_errors = require('./errors'    )(node)\n    }\n    node.init(node_data)\n\n    node.resource_at = (key) => {\n        if (typeof key !== 'string')\n            throw (JSON.stringify(key) + ' is not a key!')\n        if (!node.resources[key])\n            node.resources[key] = create_resource()\n\n        return node.resources[key]\n    }\n\n    var default_pipe = {id: 'null-pipe'}\n\n    // Can be called as:\n    //  - get(key)\n    //  - get(key, cb)\n    //  - get({key, origin, ...})\n    node.get = (...args) => {\n        var key, version, parents, subscribe, origin\n\n        // First rewrite the arguments if called as get(key) or get(key, cb)\n        if (typeof args[0] === 'string') {\n            key = args[0]\n            var cb = args[1]\n            origin = (cb\n                      ? 
{id: u.random_id(), send(args) {\n                          // We have new data with every 'set' or 'welcome message\n                          if ((args.method === 'set' || args.method === 'welcome')\n                              && (node.resource_at(key).weve_been_welcomed\n                                  // But we only wanna return once we have\n                                  // applied any relevant default.  We know\n                                  // the default has been applied because\n                                  // there will be at least one version.\n                                  && !(default_val_for(key)\n                                       && !node.current_version(key)))) {\n\n                              // Let's also ensure this doesn't run until\n                              // (weve_been_welcomed || zero get handlers are registered)\n\n                              // And if there is a .default out there, then\n                              // make sure the state has at least one version\n                              // before calling.\n                              cb(node.resource_at(key).mergeable.read())}}}\n                      : default_pipe)\n            if (cb) cb.pipe = origin\n        }\n        else {\n            // Else each parameter is passed explicitly\n            ({key, version, parents, subscribe, origin} = args[0])\n        }\n\n        // Set defaults\n        if (!version)\n            // We might default keep_alive to false in a future version\n            subscribe = subscribe || {keep_alive: true}\n\n        if (!origin)\n            origin = {id: u.random_id()}\n\n        // Define handy variables\n        var resource = node.resource_at(key)\n\n        // Handle errors\n        try {\n            node.protocol_errors.get({...args, key, subscribe, version, parents, origin})\n        }\n        catch (errors) { return errors }\n\n        node.ons.forEach(on => on('get', {key, version, parents, 
subscribe, origin}))\n\n        // Now record this subscription to the bus\n        node.incoming_subscriptions.add(key, origin.id, origin)\n        // ...and bind the origin pipe to future sets\n        node.bind(key, origin)\n\n        // If this is the first subscription, fire the .on_get handlers\n        if (node.incoming_subscriptions.count(key) === 1) {\n            log('node.get:', node.pid, 'firing .on_get for',\n                node.bindings(key).length, 'pipes!')\n            // This one is getting called afterward\n            node.bindings(key).forEach(pipe => {\n\n                var best_t = -Infinity\n                var best_parents = null\n                Object.values(node.resource_at(key).fissures).forEach(f => {\n                    if (f.a == node.pid && f.b == pipe.remote_peer && f.time > best_t) {\n                        best_t = f.time\n                        best_parents = f.versions\n                    }\n                })\n\n                pipe.send && pipe.send({\n                    method:'get', key, version, parents: best_parents, subscribe\n                })\n            })\n        }\n\n        // Now if the person connecting with us wants to be a citizen, they'll\n        // set \"pid\", and we'll want to send them a \"get\" as well so that we\n        // can learn about their updates -- of course, when they get that get,\n        // we don't want an echo war of gets begetting gets, so when someone\n        // sends the initial get, they set \"initial\" to true, but we respond\n        // with a get with initial not set to true\n\n        // Ok, now if we're going to be sending this person updates, we should\n        // start by catching them up to our current state, which we'll do by\n        // sending a \"welcome\". 
\"generate_braid\" calculates the versions\n        // comprising this welcome (we need to calculate them because we store\n        // the versions inside a space dag, and we need to pull them out...\n        // note that it wouldn't work to just keep the versions around on the\n        // side, because we also prune the space dag, meaning that the\n        // versions generated here may be different than the version we\n        // originally received, though hopefully no versions already known to\n        // this incoming peer will have been modified, or if they have been,\n        // hopefully those versions are deep enough in the incoming peer's\n        // version dag that they are not the direct parents of any new edits\n        // made by them... we strive to enforce this fact with the pruning\n        // algorithm)\n\n        var welcome_msg = node.create_welcome_message(key, parents)\n\n        // Remember this subscription from origin so that we can fissure if\n        // our connection to origin breaks\n        if (u.has_keep_alive(origin, key))\n            resource.keepalive_peers[origin.id] = {\n                id: origin.id,\n                connection: origin.connection,\n                remote_peer: origin.remote_peer\n            }\n\n        // G: ok, here we actually send out the welcome\n\n        origin.send && origin.send(welcome_msg)\n\n        return resource.mergeable.read(version)\n    }\n\n    node.create_welcome_message = (key, parents) => {\n        var resource = node.resource_at(key),\n            versions = resource.mergeable.generate_braid(parents)\n\n        // G: oh yes, we also send them all of our fissures, so they can know\n        // to keep those versions alive\n        var fissures = Object.values(resource.fissures)\n\n        // here we are setting \"parents\" equal to the leaves (aka \"frontier\")\n        // of all ancestors of parents\n        //\n        // Mike asks: Why not just have parents?  
I notice it triggers a\n        // desync bug in one of the websocket trials when I remove this line.\n        parents = (parents && Object.keys(parents).length\n                   ? resource.get_leaves(resource.ancestors(parents, true))\n                   : {})\n\n        return {method: 'welcome', key, versions, fissures, parents}\n    }\n    \n    node.error = ({key, type, in_response_to, origin}) => {\n        node.on_errors.forEach(f => f(key, origin))\n    }\n\n    // Can be called as:\n    //  - set(key, val)                     // Set key to val\n    //  - set(key, null, '= \"foo\"')         // Patch with a patch\n    //  - set(key, null, ['= \"foo\"', ...])  // Patch with multiple patches\n    //  - set({key, patches, origin, ...})\n    node.set = (...args) => {\n        var key, patches, version, parents, origin\n\n        // First rewrite the arguments if called as set(key, ...)\n        if (typeof args[0] === 'string') {\n            key = args[0]\n            patches = args[2]\n            if (typeof patches === 'string')\n                patches = [patches]\n            if (!patches)\n                patches = ['= ' + JSON.stringify(args[1])]\n        }\n        else {\n            // Else each parameter is passed explicitly\n            ({key, patches, version, parents, origin} = args[0])\n        }\n\n        var resource = node.resource_at(key)\n\n        // Set defaults\n        if (!version) version = u.random_id()\n        if (!parents) parents = {...resource.current_version}\n\n        // Catch protocol errors\n        try {\n            node.protocol_errors.set({...args, key, version, parents, patches, origin})\n        }\n        catch (errors) { return errors }\n\n        log('set:', {key, version, parents, patches, origin})\n\n        for (p in parents) {\n            if (!resource.time_dag[p]) {\n                // Todo: make this work with origin == null\n                origin && origin.send && origin.send({\n                    method: 
'error',\n                    key,\n                    type: 'cannot merge: missing parents',\n                    in_response_to: {\n                        method: 'set',\n                        key, patches, version, parents\n                    }\n                })\n                node.on_errors.forEach(f => f(key, origin))\n                return                    \n            }\n        }\n\n        node.ons.forEach(on => on('set', {key, patches, version, parents, origin}))\n\n        // Cool, someone is giving us a new version to add to our\n        // datastructure.  it might seem like we would just go ahead and add\n        // it, but instead we only add it under certain conditions, namely one\n        // of the following must be true:\n        //\n        // !origin : in this case there is no origin, meaning the version was\n        // created locally, so we definitely want to add it.\n        //\n        // !resource.time_dag[version] : in this case the version must have come\n        // from someone else (or !origin would be true), but we don't have\n        // the version ourselves (otherwise it would be inside our time_dag),\n        // so we want to add this new version we haven't seen before.\n\n        var is_new = !origin                        // Was created locally\n                     || !resource.time_dag[version] // Or we don't have it yet\n        if (is_new) {\n            // G: so we're going to go ahead and add this version to our\n            // datastructure, step 1 is to call \"add_version\" on the resource..\n\n            resource.add_version(version, parents, patches)\n\n            // G: and now for the forwarding of the version to all our peers,\n            // (unless we received this \"set\" from one of our peers,\n            // in which case we don't want to send it back to them)\n\n            log('set: broadcasting to',\n                node.bindings(key)\n                   .filter(p => p.send && (!origin || p.id !== 
origin.id))\n                   .map   (p => p.id),\n                'pipes from', origin && origin.id)\n\n            node.bindings(key).forEach(pipe => {\n                if (pipe.send && (!origin || (pipe.id !== origin.id))) {\n                    log('set: sending now from', node.pid, pipe.type)\n                    pipe.send({method: 'set',\n                               key, patches, version, parents})\n                }\n            })\n        }\n\n        node.antimatter.set({\n            ...args,\n            key, patches, version, parents, origin, is_new\n        })\n\n        return version\n    }\n    node.set_patch = node.setPatch = (key, patch) => node.set({key, patches: [patch]})\n\n    // Todo:\n    //  - Rename min_leaves and unack_boundary to unack_from and unack_to\n    node.welcome = (args) => {\n        var {key, versions, fissures, unack_boundary, min_leaves, parents, origin} = args\n\n        // Note: `versions` is actually array of set messages.\n\n        // Catch protocol errors\n        try {\n            node.protocol_errors.welcome(args)\n        }\n        catch (errors) { return errors }\n\n        var resource = node.resource_at(key)\n\n        // let people know about the welcome\n        node.ons.forEach(\n            on => on('welcome', {key, versions, fissures, unack_boundary, min_leaves, origin})\n        )\n\n        // Some of the incoming versions we may already have.  So one might\n        // ask, why don't we just filter the versions according to which ones\n        // we already have? why this versions_to_add nonsense? The issue is\n        // that there may be versions which we don't have, but that we don't\n        // want to add either, presumably because we pruned them, and this\n        // code seeks to filter out such versions. 
The basic strategy is that\n        // for each incoming version, if we already have that version, not\n        // only do we want to not add it, but we don't want to add any\n        // incoming ancestors of that version either (because we must already\n        // have them, or else we did have them, and pruned them)\n\n        var versions_to_add = {}\n        versions.forEach(v => versions_to_add[v.version] = v.parents)\n        versions.forEach(v => {\n            // For each incoming version...\n            // ... if we have this version already:\n            if (resource.time_dag[v.version]) {\n                // Then remove it and its ancestors from our \"stuff to add\"\n                remove_ancestors(v.version)\n                function remove_ancestors (v) {\n                    if (versions_to_add[v]) {\n                        Object.keys(versions_to_add[v]).forEach(remove_ancestors)\n                        delete versions_to_add[v]\n                    }\n                }\n            }\n        })\n\n        // Now versions_to_add will only contain truthy values for versions\n        // which we really do want to add (they are new to us, and they are\n        // not repeats of some version we had in the past, but pruned away)\n\n        var added_versions = []\n        for (var v of versions) {\n            if (versions_to_add[v.version]) {\n                if (!Object.keys(v.parents).every(p => resource.time_dag[p]))\n                    return send_error()\n\n                resource.add_version(v.version, v.parents, v.patches, v.hint)\n                added_versions.push(v)\n            }\n        }\n\n        function send_error() {\n            origin.send && origin.send({\n                method: 'error',\n                key,\n                type: 'cannot merge: missing necessary versions',\n                in_response_to: {\n                    method: 'welcome',\n                    key, versions, fissures, unack_boundary, min_leaves\n    
            }\n            })\n            node.on_errors.forEach(f => f(key, origin))\n        }\n\n        // Let's also check to make sure we have the min_leaves and unack_boundary,\n        // if they are specified..\n        if (((min_leaves && Object.keys(min_leaves).some(k => !resource.time_dag[k]))\n             || (unack_boundary && Object.keys(unack_boundary).some(k => !resource.time_dag[k]))))\n            return send_error()\n        \n        node.antimatter.welcome({...args, versions_to_add, added_versions})\n        \n        // Now that we processed the welcome, set defaults if we have one\n        var default_val = default_val_for(key)\n        if (default_val && !node.current_version(key)) {\n            node.set({key, patches: [` = ${JSON.stringify(default_val)}`], version: 'default_version', parents: {}})\n        }\n    }\n    \n    // Can be called as:\n    //  - forget(key, cb), with the same cb passed to get(key, cb)\n    //  - forget({key, origin})\n    node.forget = (...args) => {\n        var key, origin, cb\n        if (typeof(args[0]) === 'string') {\n            key = args[0]\n            cb = args[1]\n            origin = cb.pipe\n        } else {\n            ({key, origin} = args[0])\n        }\n\n        log(`forget: ${node.pid}, ${key}->${origin.id}`)\n\n        // Catch protocol errors\n        try {\n            node.protocol_errors.forget({...args, key, origin})\n        }\n        catch (errors) { return errors }\n        node.ons.forEach(on => on('forget', {key, origin}))\n\n        var resource = node.resource_at(key)\n        delete resource.keepalive_peers[origin.id]\n        node.unbind(key, origin)\n        node.incoming_subscriptions.delete(key, origin.id)\n\n        // todo: what are the correct conditions to send the forget?\n        // for now, we just support the hub-spoke model, where only clients\n        // send forget.\n        // here is what the todo said before:\n        // TODO: if this is the last 
subscription, send forget to all gets_out\n        // origin.send({method: 'forget', key})\n        if (cb && node.incoming_subscriptions.count(key) == 0) {\n            node.bindings(key).forEach(pipe => {\n                pipe.send && pipe.send({\n                    method:'forget', key, origin\n                })\n            })\n        }\n    }\n\n    node.ack = (args) => {\n        var {key, valid, seen, version, origin} = args\n\n        try {\n            node.protocol_errors.ack(args)\n        }\n        catch (errors) { return errors }\n\n        node.ons.forEach(on => on('ack', {key, valid, seen, version, origin}))\n        log('node.ack: Acking!!!!', {key, seen, version, origin})\n\n        node.antimatter.ack(args)\n    }\n    \n    node.fissure = ({key, fissure, origin}) => {\n        try {\n            node.protocol_errors.fissure({key, fissure, origin})\n        }\n        catch (errors) { return errors }\n\n        node.ons.forEach(on => on('fissure', {key, fissure, origin}))\n\n        node.antimatter.fissure({key, fissure, origin})\n    }\n\n    node.disconnected = ({key, name, versions, parents, time, origin}) => {\n        // Todo:\n        //  - rename \"name\" to \"fissure\".\n        //  - rename \"time\" to \"disconnect_time\"\n        if (!time) time = Date.now()\n        node.ons.forEach(on => on('disconnected', {key, name, versions, parents, time, origin}))\n\n        // unbind them (but only if they are bound)\n        if (node.bindings(key).some(p => p.id == origin.id)) node.unbind(key, origin)\n\n        node.antimatter.disconnected({key, name, versions, parents, time, origin})\n    }\n    \n    node.delete = () => {\n        // NOT IMPLEMENTED: idea: use \"undefined\" to represent deletion\n        // update: we now have a {type: \"deleted\"} thing (like {type: \"location\"}),\n        // may be useful for this\n    }\n\n    node.current_version = (key) =>\n        Object.keys(node.resource_at(key).current_version).join('-') || 
null\n    node.versions = (key) => Object.keys(node.resource_at(key).time_dag)\n    node.fissures = (key) => Object.values(node.resource_at(key).fissures).map(\n        fiss => ({ ...fiss,\n                   // Reformat `versions` and `parents` as arrays\n                   parents:  Object.keys(fiss.parents),\n                   versions: Object.keys(fiss.versions) }))\n    node.unmatched_fissures = (key) => {\n        var result = []\n        var fissures = node.resource_at(key).fissures\n        outer_loop:\n        for (fiss in fissures) {\n            for (fiss2 in fissures) {\n                if (   fissures[fiss].conn === fissures[fiss2].conn\n                    && fissures[fiss].a    === fissures[fiss2].b\n                    && fissures[fiss].b    === fissures[fiss2].a)\n                    continue outer_loop\n            }\n            fiss = fissures[fiss]\n            result.push({...fiss,\n                         // Reformat `versions` and `parents` as arrays\n                         parents:  Object.keys(fiss.parents),\n                         versions: Object.keys(fiss.versions)})\n        }\n        return result\n    }\n\n    node.default = (key, val) => {\n        var is_wildcard = key[key.length-1] === '*'\n        var v = val\n        if (is_wildcard) {\n            // Wildcard vals must be functions\n            if (typeof val !== 'function')\n                v = () => val\n            node.default_patterns[key.substr(0,key.length-1)] = v\n        }\n        else\n            node.defaults[key] = val\n    }\n    function default_val_for (key) {\n        if (key in node.defaults) {\n            // console.log('Default('+key+') is', node.defaults[key])\n            return node.defaults[key]\n        }\n\n        for (pattern in node.default_patterns)\n            if (pattern === key.substr(0, pattern.length)) {\n                // console.log('Default('+key+') is', node.default_patterns[pattern])\n                return 
node.default_patterns[pattern](key)\n            }\n    }\n    node._default_val_for = default_val_for;\n\n    function create_resource(resource = {}) {\n        // The version history\n        if (!resource.time_dag) resource.time_dag = {}\n        if (!resource.current_version) resource.current_version = {}\n        if (!resource.version_cache) resource.version_cache = {}\n\n        resource.add_version = (version, parents, patches, hint) => {\n            if (resource.time_dag[version])\n                return\n            if (!Object.keys(parents).length\n                && Object.keys(resource.time_dag).length)\n                return\n\n            resource.time_dag[version] = {...parents}\n\n            // TODO: Store hint in the version_cache; not sort_keys\n            var sort_keys = (hint && hint.sort_keys) || undefined\n            resource.version_cache[version] = JSON.parse(JSON.stringify({\n                version, parents, patches, sort_keys\n            }))\n\n            Object.keys(parents).forEach(k => {\n                if (resource.current_version[k])\n                    delete resource.current_version[k]\n            })\n            resource.current_version[version] = true\n\n            resource.mergeable.add_version(version, parents, patches, hint)\n        }\n\n        resource.ancestors = (versions, ignore_nonexistent) => {\n            var result = {}\n            // console.log('ancestors:', versions)\n            function recurse (version) {\n                if (result[version]) return\n                if (!resource.time_dag[version]) {\n                    if (ignore_nonexistent) return\n                    assert(false, 'The version '+version+' no existo')\n                }\n                result[version] = true\n                Object.keys(resource.time_dag[version]).forEach(recurse)\n            }\n            Object.keys(versions).forEach(recurse)\n            return result\n        }\n        resource.get_leaves = (versions) 
=> {\n            var leaves = {...versions}\n            Object.keys(versions).forEach(v => {\n                Object.keys(resource.time_dag[v]).forEach(p => delete leaves[p])\n            })\n            return leaves\n        }\n\n        // A data structure that can merge simultaneous operations\n        if (!resource.merge_type) resource.merge_type = 'sync9'\n        resource.mergeable = require(\n            `../${resource.merge_type}/${resource.merge_type}.js`\n        )(resource)\n\n        // Peers that we have sent a welcome message to\n        if (!resource.keepalive_peers) resource.keepalive_peers = {}\n\n        // Have we been welcomed yet?  (Has the data loaded?)\n        if (!resource.weve_been_welcomed) resource.weve_been_welcomed = false\n\n        // Disconnections that have occurred in the network without a forget()\n        if (!resource.fissures) resource.fissures = {}\n\n        // Acknowledgement data\n        if (!resource.acked_boundary) resource.acked_boundary = {}\n        if (!resource.unack_boundary) resource.unack_boundary = {}\n        if (!resource.acks_in_process) resource.acks_in_process = {}\n        \n        return resource\n    }\n    node.create_resource = create_resource\n\n    // ===============================================\n    //\n    //   Bindings:\n    //\n    //         Attaching pipes to events\n    //\n    function pattern_matcher () {\n        // The pipes attached to each key, maps e.g. 
'get /point/3' to '/30'\n        var handlers = u.one_to_many()\n        var wildcard_handlers = []  // An array of {prefix, funk}\n\n        var matcher = {\n            // A set of timers, for keys to send forgets on\n            bind (key, pipe, allow_wildcards) {\n                allow_wildcards = true // temporarily\n                if (allow_wildcards && key[key.length-1] === '*')\n                    wildcard_handlers.push({prefix: key, pipe: pipe})\n                else\n                    handlers.add(key, pipe.id, pipe)\n\n                // Now check if the method is a get and there's a gotton\n                // key in this space, and if so call the handler.\n            },\n\n            unbind (key, pipe, allow_wildcards) {\n                allow_wildcards = true // temporarily\n                if (allow_wildcards && key[key.length-1] === '*')\n                    // Delete wildcard connection\n                    for (var i=0; i<wildcard_handlers.length; i++) {\n                        var handler = wildcard_handlers[i]\n                        if (handler.prefix === key && handler.pipe.id === pipe.id) {\n                            wildcard_handlers.splice(i,1)  // Splice this element out of the array\n                            i--                            // And decrement the counter while we're looping\n                        }\n                    }\n                else\n                    // Delete direct connection\n                    handlers.delete(key, pipe.id)\n            },\n\n            bindings (key) {\n                // Note:\n                //\n                // We need the bindings that persist state to the database to come\n                // first.  In statebus we added a .priority flag to them, and\n                // processed those priority handlers first.  
We haven't implemented\n                // that yet, and are just relying on setting these handlers first in\n                // the array and hash, which makes them come first.  But we need to\n                // make this more robust in the future.\n                //\n                // We might, instead of doing a .priority flag, have separate\n                // .on_change and .on_change_sync handlers.  Then the database stuff\n                // would go there.\n\n                assert(typeof key === 'string',\n                       'Error: \"' + key + '\" is not a string')\n\n                var result = u.dict()\n\n                // First get the exact key matches\n                var pipes = handlers.get(key)\n                for (var i=0; i < pipes.length; i++)\n                    result[pipes[i].id] = pipes[i]\n\n                // Now iterate through prefixes\n                for (var i=0; i < wildcard_handlers.length; i++) {\n                    var handler = wildcard_handlers[i]\n                    var prefix = handler.prefix.slice(0, -1)       // Cut off the *\n\n                    if (prefix === key.substr(0,prefix.length))\n                        // If the prefix matches, add it to the list!\n                        result[handler.pipe.id] = handler.pipe\n                }\n                return Object.values(result)\n            }\n        }\n        return matcher\n    }\n\n    // Give the node all methods of a pattern matcher, to bind keys and pipes\n    Object.assign(node, pattern_matcher())\n\n    node.parse_patch = u.parse_patch\n\n    node.websocket_client = (args) => require('./websocket-client.js')({\n        ...args,\n        node: node,\n        create_websocket: () => new (require('ws'))(args.url)\n    })\n\n    return node\n}\n"
  },
  {
    "path": "kernel/package.json",
    "content": "{\n  \"name\": \"braid-bus\",\n  \"version\": \"0.0.1\",\n  \"description\": \"\",\n  \"scripts\": {\n    \"test\": \"node test/tests.js\",\n    \"prepublish\": \"node ../util/braid-bundler.js\"\n  },\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org\",\n  \"files\": [\n    \"*\"\n  ],\n  \"main\": \"bus.js\",\n  \"exports\": {\n    \"require\": \"./bus.js\",\n    \"import\": \"./bus.mjs\"\n  },\n  \"dependencies\": {\n    \"better-sqlite3\": \"^5.4.3\",\n    \"parse-headers\": \"^2.0.4\",\n    \"ws\": \"^7.3.1\"\n  }\n}\n"
  },
  {
    "path": "kernel/pipe.js",
    "content": "// A pipe is a network connection that can get disconnected and reconnected.\n//\n// A pipe can send and receive.  The user supplies a `send_function` that:\n//\n//   • will be called from pipe.send(), and\n//   • will return a result to pipe.recv().\n//\n// When a pipe disconnects, it will automatically send out fissures.  When it\n// re-connects, it will automatically re-establish connections.\n//\n// Todo:\n//   • Describe the connect process and connect() function\n//\nmodule.exports = require.pipe = function create_pipe({node, id, send, connect, disconnect, type}) {\n    assert(node && send && connect, {node,send,connect})\n    id = id || u.random_id()\n\n    var ping_time = 50000\n    var death_time = 40000\n    var ping_timer = null\n\n    function on_pong() {\n        if (typeof(g_is_wiki_tester) != 'undefined') { return }\n\n        clearTimeout(ping_timer)\n        ping_timer = setTimeout(() => {\n            send.call(pipe, {method: 'ping'})\n            ping_timer = setTimeout(() => disconnect.call(this), death_time)\n        }, ping_time)\n    }\n    // The Pipe Object!\n    var pipe = {\n\n        // A pipe holds some state:\n        id: id,\n        type: type, // Only used for debugging\n        connection: null,\n        connecting: false,\n        remote_peer: null,\n        most_recent_remote_peer: null,\n        subscribed_keys: u.dict(),\n        //remote: true,\n\n        // It can Send and Receive messages\n        send (args) {\n            var we_welcomed = args.key && node.resource_at(args.key).keepalive_peers[this.id]\n            assert(args.method !== 'hello')\n\n            // Record new keys\n            if (args.method === 'get') {\n                assert(!this.connection\n                       || !this.subscribed_keys[args.key]\n                       || !this.subscribed_keys[args.key].we_requested,\n                       'Duplicate get 1:', args,\n                       {connection: this.connection,\n              
          subscription: this.subscribed_keys[args.key]})\n\n                assert(args.key, node.resource_at(args.key).mergeable)\n\n                // Initialize subscribed_keys\n                this.subscribed_keys[args.key] =\n                    this.subscribed_keys[args.key] || {}\n\n                // Remember that we requested this subscription\n                this.subscribed_keys[args.key].we_requested = args.subscribe\n\n                // If this is the first message, let's try to connect the pipe.\n                if ( this.connecting) return\n                if (!this.connection) {\n                    this.connecting = true\n\n                    // Run the programmer's connect function\n                    connect.call(this)\n\n                    // Don't run the send code below, since we'll send this\n                    // get when the connection completes\n                    return\n                }\n            }\n\n            else if (args.method === 'forget') {\n                // Record forgotten keys\n                delete this.subscribed_keys[args.key].we_requested\n                node.unbind(args.key, this)\n            }\n\n            else if (args.method === 'welcome' && !args.unack_boundary) {\n                // If we haven't welcomed them yet, ignore this message\n            }\n\n            else if (!we_welcomed) {\n                // Oh shit, I think this is a bug.  Cause if they welcomed us,\n                // we wanna send them shit too... but maybe we need to start\n                // by welcoming them.\n                log('gooooo away', we_welcomed)\n                return\n            }\n\n            // Clean out the origin... 
because we don't use that.\n            delete args.origin\n\n            // And now send the message\n            if (this.connection)\n                send.call(this, args)\n            else\n                log('FAILED to send, because pipe not yet connected..')\n        },\n        recv (args) {\n            var we_welcomed = args.key && node.resource_at(args.key).keepalive_peers[this.id]\n\n            // ping/pong system\n            if (args.method === 'ping') {\n                send.call(this, {method: 'pong'})\n                return\n            } else if (args.method === 'pong') {\n                on_pong()\n                return\n            }\n\n            // The hello method is only for pipes\n            if (args.method === 'hello') {\n                this.connection = (this.connection < args.connection\n                                   ? this.connection : args.connection)\n                this.most_recent_remote_peer = this.remote_peer = args.my_name_is\n\n                // hello messages don't do anything else (they are just for\n                // the pipe)\n                return\n            }\n\n            if (args.method === 'welcome'\n                && !we_welcomed\n                /*&& !this.subscribed_keys[args.key].we_requested*/) {\n                // Then we need to welcome them too\n                let parents = {...args.parents}\n                args.versions.forEach(v => parents[v.version] = true)\n                this.send(node.create_welcome_message(args.key, parents))\n        \n                // Now we store a subset of this pipe in a place that will\n                // eventually be saved to disk.  When a node comes up after a\n                // crash, it'll need to create and send fissures for everyone\n                // it's welcomed.  
So right here we store the info necessary\n                // to fissure.\n                let resource = node.resource_at(args.key)\n                resource.keepalive_peers[this.id] = {id: this.id,\n                                                     connection: this.connection,\n                                                     remote_peer: this.remote_peer}\n            }\n\n            // Remember new subscriptions from them\n            if (args.method === 'get') {\n                // assert(!(this.subscribed_keys[args.key]\n                //          && this.subscribed_keys[args.key].they_requested),\n                //        'Duplicate get 2:', args,\n                //        {subscription: this.subscribed_keys[args.key]})\n\n                // Initialize subscribed_keys\n                this.subscribed_keys[args.key] =\n                    this.subscribed_keys[args.key] || {}\n\n                // Record their subscription\n                this.subscribed_keys[args.key].they_requested = args.subscribe\n            }\n\n            args.origin = this\n            node[args.method](args)\n\n            if (args.method === 'get')\n                log('pipe.recv: New remote!', this.id,\n                    'Now we have',\n                    node.bindings(args.key).filter(pipe => pipe.remote).length)\n        },\n\n        // It can Connect and Disconnect\n        connected () {\n            // console.log('pipe.connect:', this.id, this.connection || '')\n\n            if (this.connection) {\n                log('pipe.connect:', this.id, 'already exists! 
abort!')\n                return\n            }\n\n            this.connecting = false\n\n            // Create a new connection ID\n            this.connection = u.random_id()\n\n            // Initiate connection with peer\n            log('sending hello..')\n\n            send.call(this, {method: 'hello',\n                             connection: this.connection,\n                             my_name_is: node.pid})\n\n            // Send gets for all the subscribed keys again\n            for (k in this.subscribed_keys) {\n                // This one is getting called earlier.\n                //\n                // The send() function wants to make sure this isn't a\n                // duplicate request, so let's delete the old one now so\n                // that we can recreate it.\n\n                var subscribe = this.subscribed_keys[k].we_requested\n                delete this.subscribed_keys[k].we_requested\n\n                var best_t = -Infinity\n                var best_parents = null\n                Object.values(node.resource_at(k).fissures).forEach(f => {\n                    if (f.a == node.pid && f.b == this.most_recent_remote_peer && f.time > best_t) {\n                        best_t = f.time\n                        best_parents = f.versions\n                    }\n                })\n\n                this.send({\n                    key: k,\n                    subscribe: subscribe,\n                    method: 'get',\n                    parents: best_parents\n                })\n            }\n\n            on_pong()\n        },\n        disconnected () {\n            clearTimeout(ping_timer)\n\n            for (var k in this.subscribed_keys) {\n\n                if (u.has_keep_alive(this, k))\n                    // Tell the node.  
It'll make fissures.\n                    node.disconnected({key:k, origin: this})\n\n                // Drop all subscriptions not marked keep_alive\n                var s = this.subscribed_keys[k]\n                if (!(s.we_requested   && s.we_requested.keep_alive  ))\n                    delete s.we_requested\n                if (!(s.they_requested && s.they_requested.keep_alive))\n                    delete s.they_requested\n\n                // If both are gone, remove the whole subscription\n                if (!(s.we_requested || s.they_requested))\n                    delete this.subscribed_keys[k]\n            }\n\n            this.connecting = false\n            this.connection = null\n            this.remote_peer = null\n        },\n\n        printy_stuff (key) {\n            return {id: this.id,\n                    w: !!node.resource_at(key).keepalive_peers[this.id],\n                    k_a: u.has_keep_alive(this, key),\n                    peer: this.remote_peer,\n                    c: !!this.connection\n                   }\n        }\n    }\n\n    return pipe\n}\n\n"
  },
  {
    "path": "kernel/readme.md",
    "content": "# A prototype Braid Kernel\n\nAn abstraction for distributed state.\n\n## Status\n\nWe've built some cool algorithms in here, but it isn't cleaned up for release\nyet.  Mike is working on it!\n\n## Running the code\nIf you have nodejs installed, then set it up with:\n```\nnpm install\n```\n\n### Chat demo\nYou can run the chat server with:\n\n```\ncd demos/sync9-chat\nnode chat-server.js\n```\n\nThen open a web browser to `http://localhost:3009/braidchat` (for a websocket connection) or `.../braidchat?protocol=http` for a backwards-compatible http/1.1 connection.\n\n### Wiki demo\n\nYou can run the wiki server with:\n```\nnode demos/wiki/wiki-server.js\n```\nAnd then open `http://localhost:3009/<any-path-here>`.\n\n### Seeing the guts\n\nFor any command, you can tell it to print out all network traffic in a table\nby adding the command-line argument `--network` to it, like this:\n\n```\nnode chat-server.js --network\n```\n\nThen you'll see something like this:\n\n```\nws: server --> C-j2lm GET     {\"key\":\"/usr\",\"parents\":null,\"subscribe\":{\"keep_alive\":true}}\nws: server --> C-j2lm WELCOME {\"key\":\"/usr\",\"versions\":[{\"version\":null,\"parents\":{},\"changes\":[\" = {\\\"B-0bnyC1mdA9\\\":\\\"FirefoxHTTP\\\"}\"]}\nws: C-j2lm --> server WELCOME {\"key\":\"/chat\",\"versions\":[],\"fissures\":[],\"parents\":null}\nws: C-j2lm --> server WELCOME {\"key\":\"/usr\",\"versions\":[],\"fissures\":[],\"parents\":null}\nws: C-j2lm --> server SET     {\"key\":\"/usr\",\"patches\":[\"[\\\"B-0bnyC1mdA9\\\"] = \\\"FrefoxHTTP\\\"\"],\"version\":\"bz2gyet9cv6\",\"parents\":{\"66mn2f0vco8\":true}}\n```\n\n## Running tests:\n\n```\nnpm test\n```\n\nIf you want to see what it's doing, print out the network traffic with:\n\n```\nnpm test network\n```\n\nWhat if one of the trials crashes?  
To debug it, re-run that particular trial\nwith:\n\n```\nnpm test solo 68\n```\n\nThis will re-run trial 68, and print out debugging info so you can find the\nproblem and fix it.\n\nYou can also configure parameters to test at the top of `test/tests.js`.\n\n"
  },
  {
    "path": "kernel/sqlite-store.js",
    "content": "\n// options = {\n//     table_name: 'store' // <-- default, a table of this name will be created in sqlite\n// }\n// options also passed down to 'store.js'\nmodule.exports = require['sqlite-store'] = function create_sqlite_store(filename, tablename) {\n    var db = new (require('better-sqlite3'))(filename)\n    if (!tablename)\n        tablename = 'store'\n\n    db.pragma('journal_mode = WAL')\n    db.prepare(`create table if not exists ${tablename} (key text primary key, val text)`).run()\n\n    const GET_STATEMENT = db.prepare(`select * from ${tablename} where key = ?`)\n    const SET_STATEMENT = db.prepare(`replace into ${tablename} (key, val) values (?, ?)`)\n    const DEL_STATEMENT = db.prepare(`delete from ${tablename} where key = ?`)\n    const LIST_STATEMENT = db.prepare(`select key from ${tablename}`);\n    return {\n        get(key) {\n            var row = GET_STATEMENT.get([key])\n            return row && row.val\n        },\n        set(key, data) {\n            SET_STATEMENT.run([key, data])\n        },\n        del(key) {\n            DEL_STATEMENT.run([key])\n        },\n        list_keys() {\n            return LIST_STATEMENT.all().map(x => x.key);\n        }\n    }\n}\n"
  },
  {
    "path": "kernel/store.js",
    "content": "\n// options = {\n//     compress_if_inactive_time: 4000 // <-- default, means it will compress 4 seconds after the last edit, as long as no other edits happen\n//     compress_after_this_many: 10000 // <-- default, means it will compress if there are 10000 uncompressed edits\n// }\n// db = {\n//     get(key, cb)\n//     set(key, val, cb)\n//     del(key, cb)\n//     list_keys(cb)\n//}\nmodule.exports = require.store = function create_store(node, db, options) {\n    if (!options) options = {}\n    if (options.compress_if_inactive_time == null) options.compress_if_inactive_time = 4000\n    if (options.compress_after_this_many == null) options.compress_after_this_many = 10000\n\n    var inactive_timers = {}\n    var nexts = {}\n\n    let pid = db.get('pid');\n    node.pid = pid || node.pid;\n    // Set the node's PID, and then play back the db into the node\n    db.set('pid', node.pid)\n    fastforward()\n\n    // When something happens in the node, record it, and reset the the inactivity timer\n    node.ons.push((method, arg) => {\n        var key = arg.key\n        add(key, { method, arg })\n\n        var n = nexts[key]\n        if (typeof (g_debug_WS_messages) != 'undefined') {\n            if (n[1] >= options.compress_after_this_many)\n                g_debug_WS_messages.push(() => compress(key))\n        } else {\n            clearTimeout(inactive_timers[key])\n            // If we've had enough messages, compress right away\n            // Otherwise, compress in a few seconds\n            inactive_timers[key] = setTimeout(() => compress(key),\n                n[1] >= options.compress_after_this_many ? 
0 : options.compress_if_inactive_time)\n        }\n    })\n    // Ensure the node knows that it's totally disconnected at startup.\n    Object.entries(node.resources).forEach(([key, r]) =>\n        Object.values(r.keepalive_peers).forEach(pipe => {\n            node.disconnected({ key, origin: pipe })\n        })\n    )\n\n    return Promise.all(Object.keys(nexts).map(compress)).then(_ => node);\n\n    function fastforward() {\n        // console.log(\"Fast-forwarding braid state using db...\")\n        // For all ab:... keys\n        let keys = db.list_keys();\n        keys.filter(k => k.match(/^ab:/)).map((k) => {\n            let ab = db.get(k);\n            // Get the part after ab\n            // Sorry this isn't more informative, I do not understand the db format\n            let key = k.slice(3)\n            var i = find_open_index(ab, key, (val) => {\n                // Pass the stored braid messages to the node\n                let msg = JSON.parse(val)\n                if (!msg.method) {\n                    node.resources[key] = node.create_resource(msg)\n                    Object.values(node.resources[key].keepalive_peers).forEach(pipe => {\n                        pipe.remote = true\n                        node.bind(key, pipe)\n                        node.incoming_subscriptions.add(key, pipe.id, pipe)\n                    })\n                }\n                else node[msg.method](msg.arg)\n            })\n            // Set nexts once we've found the open index\n            nexts[key] = [ab, i];\n        })\n    }\n\n    function add(key, x) {\n        var n = nexts[key]\n        if (!n) {\n            db.set(`ab:${key}`, 'a');\n            n = nexts[key] = ['a', 0]\n        }\n        // Try to set the key as the next element in the sequence\n        try {\n            db.set(`${n[0]}:${n[1]++}:${key}`, JSON.stringify(x))\n        } catch (err) {\n            console.error(err);\n            console.error(`Failed to set key 
${n[0]}:${n[1]++}:${key} to value`);\n            console.dir(x, { depth: 5 });\n        }\n    }\n    function compress(key) {\n        var n = nexts[key]\n        if (!n) return\n        var ab = (n[0] == 'a') ? 'b' : 'a'\n        let i = find_open_index(ab, key, (_, ii) => \n            // Count up and delete\n            db.del(`${ab}:${ii}:${key}`)\n        )\n        // At the top\n        nexts[key] = [ab, 0]\n        add(key, node.resource_at(key));\n        db.set(`ab:${key}`, ab)\n        for (let ii = n[1] - 1; ii >= 0; ii--)\n            db.del(`${n[0]}:${ii}:${key}`)\n    }\n\n    function find_open_index(ab, key, intermediate) {\n        let i = 0;\n        let val;\n        while (val = db.get(`${ab}:${i}:${key}`)) {\n            // Do something with the lower values of i\n            intermediate && intermediate(val, i++);\n        }\n        return i;\n    }\n}\n"
  },
  {
    "path": "kernel/test/tests.js",
    "content": "require('../../sync9/sync9.js')\nrequire('../../util/utilities.js')\n\n//show_debug = true\n\nvar n_peers = 3\nvar n_steps_per_trial = 100\nvar n_trials = 100\nvar rand = null\nvar random_seed_base = '000_hi_010bcdefg'\nshow_protocol_errors = true\n\nsolo_trial = null\n\nif (!is_browser && process.argv.length >= 4 && process.argv[2] === 'solo') {\n    solo_trial = parseInt(process.argv[3])\n    // show_debug = true\n    print_network = true\n}\n\n\n// show_debug = true\n// print_network = true\n\nvar sim = {\n    n_peers,\n    n_steps_per_trial,\n    n_trials,\n\n    rand,\n    step,\n    add_peer,\n\n    peers_dict: {},\n    peers: []\n}\nsim.vis = is_browser\n    ? require('../demos/visualization/visualization.js')(\n        {rand: Math.create_rand(''), ...sim}\n    )\n    : {add_frame() {}}\n\nvar vis = sim.vis\n\nfunction add_peer (node, peer_number) {\n    sim.peers.push(node)\n    make_alphabet(node, peer_number)\n    sim.peers_dict[node.pid] = node\n}\nfunction make_alphabet (node, peer_number) {\n    var alphabets = [\n        'abcdefghijklmnopqrstuvwxyz',\n        'ABCDEFGHIJKLMNOPQRSTUVWXYZ',\n        '⬅︎⬇︎⬆︎',\n        ''\n    ]\n    node.letters = alphabets[peer_number] || ''\n    for (var i = 0; i < 26; i++)\n        node.letters += String.fromCharCode(12032 + 1000*peer_number + i)\n    node.letters_i = 0\n    // console.log('Node', peer_number, 'letters:', node.letters)\n}\n\n\nfunction save_node_copy(node) {\n    var x = JSON.parse(JSON.stringify(node))\n    x.connected_to = {}\n    node.bindings('my_key').forEach(pipe => {\n        var [from, to] = pipe.id.split('-')\n        if (pipe.connecting || pipe.connection) {\n            x.connected_to[to] = true\n        }\n    })\n    return x\n}\n\n\nvar num_edits = 0\nfunction step(frame_num) {\n    // Randomly choose whether to do an action vs. 
process the network\n    if (rand() < 0.1) {\n        // Do an action\n        if (rand() < 0.9) {\n            // Edit text\n\n            var i = Math.floor(rand() * n_peers)\n            var peer = sim.peers[i]\n\n            // ..but only if we have at least one version already, which\n            // is really to make sure we've received \"root\" already (but\n            // we can't check for \"root\" since it may get pruned away)\n            if (peer.resources['my_key'] &&\n                Object.keys(peer.resources['my_key'].time_dag).length) {\n\n                if (peer.letters_i >= peer.letters.length)\n                    peer.letters_i = 0\n\n                var e = create_random_edit(peer.resources['my_key'],\n                                           peer.letters[peer.letters_i++])\n                num_edits++\n                peer.set({key: 'my_key',\n                          patches: e.patches, version: e.version, parents: e.parents})\n            }\n            log('    editing', frame_num, peer.pid, e ? 
e.patches : '')\n        } else {\n            // Disconnect or reconnect\n            log('    toggling network', frame_num)\n            network.toggle_pipe()\n        }\n    } else {\n        // Receive incoming network message\n        if (network.receive_message) {\n            log('    receiving message', frame_num)\n            var i = Math.floor(rand() * n_peers)\n            var peer = sim.peers[i]\n            network.receive_message(peer)\n        }\n    }\n    \n    vis.add_frame({\n        frame_num,\n        peers: sim.peers.map(x => save_node_copy(x))\n    })\n}\n\nfunction create_random_edit(resource, letters) {\n    letters = letters || 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'\n    var str = resource.mergeable.read()\n    var start = Math.floor(rand() * (str.length + 1))\n    var del = Math.floor(rand() * rand() * (str.length - start + 1))\n    var ins = letters[Math.floor(rand() * letters.length)].repeat(Math.floor(rand() * 4) + (del == 0 ? 1 : 0))\n    \n    var version = rand().toString(36).slice(2)\n    resource.next_version_id = (resource.next_version_id || 0) + 1\n    var version = letters[0] + resource.next_version_id\n    \n    var patches = [`[${start}:${start + del}] = ` + JSON.stringify(ins)]\n    return {\n        version,\n        parents : Object.assign({}, resource.current_version),\n        patches\n    }\n}\n\nfunction setup_test () {\n    sim.peers = []\n    sim.peers_dict = {}\n\n    if (is_browser)\n        sim.rand = rand = Math.create_rand('')\n\n    network.setup()\n\n    // Start sending get() messages over the pipes!\n    sim.peers.forEach(node => node.get({\n        key: 'my_key',\n        subscribe: {keep_alive: true},\n        origin: {id: 'fake' + rand().toString(36).slice(2,6)}\n    }))\n\n\n    // Create initial root version\n    {\n        // There are two modes of operations.  
The differentiator is that in\n        // one mode, you can prune down to a single version, and in the other,\n        // you can only prune down to (in the worst case) the number of\n        // versions there are peers that have ever been a part of the system.\n        // (But often less than that.)\n\n        // In the first mode, you must dictate that all peers don't add\n        // anything unless they've already received a version from someone\n        // else, and you then need a special peer that creates the first\n        // version.\n\n        // But you can add something to a field of nothing.  There used to be\n        // a root node that was always there, but now you're allowed to have a\n        // version with parents where the parents is the empty set, and all\n        // the algorithms are fine with that.\n\n        // So now when we create a new timedag, a special peer will create the\n        // first version and send it to everyone else.  And that's what we do\n        // in the tests code right now.  And we do that so that we can prune\n        // down to one node, and that tells us that the tests are working, at\n        // the end of the tests.  It knows that everything should have exactly\n        // one version, that's the same thing, for all peers.\n        let p = sim.peers[0]\n        p.set({key: 'my_key', version: 'root', parents: {}, patches: ['=\"\"']})\n        vis.add_frame({\n            peers: sim.peers.map(x => save_node_copy(x))\n        })\n    }\n}\n\nfunction evaluate_trial (trial_num) {\n    log('Ok!! 
Now winding things up.')\n\n    // Make sure the resource exists on each peer\n    sim.peers.forEach((x, i) => {\n        if (!x.resources.my_key) {\n            console.log('missing my_key for ' + x.pid)\n            total_success = false\n            throw 'bad'\n        }\n    })\n    \n    // Do all peers have the same resulting value?\n    var first_peer_val = sim.peers[0].resources.my_key.mergeable.read()\n    var same_values = sim.peers.every(\n        p => u.deep_equals(p.resources.my_key.mergeable.read(), first_peer_val)\n    )\n\n    // Are all time dags pruned down to a single version?\n    var multiple_versions = sim.peers.some(\n        p => Object.keys(p.resources.my_key.time_dag).length > 1\n    )\n\n    // Are all fissures cleaned up?\n    var fissures_exist = sim.peers.some(\n        p => Object.keys(p.resources.my_key.fissures).length > 0\n    )\n\n    // Where there any problems?\n    total_success = same_values && !multiple_versions && !fissures_exist\n\n    // If so, print them out\n    if (show_debug || !total_success) {\n        console.log('TOTAL', total_success ? 'SUCCESS' : 'FAILURE')\n        sim.peers.forEach(\n            n => console.log(n.pid+':', JSON.stringify(n.resources.my_key.mergeable.read()))\n        )\n        var results = {same_values, multiple_versions, fissures_exist}\n        for (k in results)\n            console.log(k+':', results[k])\n        console.log('trial_num:', trial_num)\n        if (!total_success) throw 'stop'\n    }\n}\n\n\n// Synchronous version of the simulator\n//  - Fast and deterministic.  
For testing the core algorithm.\nfunction run_trials () {\n    if (solo_trial)\n        run_trial(solo_trial)\n    else\n        for (var i=0; i < n_trials; i++) {\n            console.log('Running trial', network.name, i)\n            run_trial(i)\n        }\n}\nfunction run_trial (trial_num) {\n    rand = sim.rand = Math.create_rand(random_seed_base + ':' + trial_num)\n    setup_test()\n\n    // Now do all the stuff\n    for (var t=0; t < n_steps_per_trial; t++) {\n        log('looping', t)\n        step(t)\n    }\n    network.wrapup()\n    evaluate_trial(trial_num)\n    if (network.die) network.die()\n}\n\n// Async version of the simulator\n//  - For testing actual network activity\nrun_trials.async = (cb) => {\n    if (solo_trial)\n        run_trial.async(solo_trial, cb)\n    else {\n        var i = -1\n        function next_trial () {\n            i++\n            console.log('Running trial', network.name, i)\n            if (i === n_trials)\n                setImmediate(cb)\n            else\n                setImmediate(() => run_trial.async(i, next_trial))\n        }\n        setTimeout(next_trial, 10)\n    }\n}\nrun_trial.async = (trial_num, cb) => {\n    rand = sim.rand = Math.create_rand(random_seed_base + ':' + trial_num)\n    setup_test()\n    var t = -1\n    function run_step () {\n        t++\n        if (t === n_steps_per_trial)\n            network.wrapup(() => {\n                evaluate_trial(trial_num)\n                if (network.die)\n                    network.die(() => setImmediate(cb))\n                else\n                    setImmediate(cb)\n            })\n        else {\n            log('  step', t)\n            step(t)\n            setTimeout(run_step, 0)\n        }\n    }\n    run_step()\n}\n\n\nvar networks = [\n    './virtual-p2p.js',\n    './websocket-test.js'\n]\n\nvar network\nif (is_browser) {\n    network = require('./virtual-p2p.js')(sim)\n    setup_test()\n    vis.loop()\n} else\n    networks.forEach( n => {\n        
network = require(n)(sim)\n        console.log('Running', n.substr(2), 'trials!')\n        if (network.sync)\n            run_trials()\n        else\n            run_trials.async(() => {\n                console.log('Done with all trials!')\n                process.exit()\n            })\n    })\n"
  },
  {
    "path": "kernel/test/virtual-p2p.js",
    "content": "// Tests using a virtual network\n\nmodule.exports = require['virtual-p2p'] = (sim) => (\n    {\n        name: 'virtual',\n        sync: true,\n        setup () {\n            for (var i = 0; i < sim.n_peers; i++) {\n                // Make a peer node\n                var node = require('../node.js')()\n\n                node.pid = 'P' + (i + 1)   // Give it an ID\n                node.incoming = []         // Give it an incoming message queue\n                sim.add_peer(node, i)      // Give it an alphabet\n            }\n            // sim.peers.forEach(p => sim.peers_dict[p.pid] = p)\n\n            // Create pipes that connect peers\n            this.pipes = {}\n            var create_vpipe = (from, to) => {\n                var pipes = this.pipes\n                var pipe = pipes[from.pid + '-' + to.pid] = require('../pipe.js')({\n                    node: from,\n                    id: from.pid + '-' + to.pid,\n\n                    // The send function\n                    send (args) {\n                        if (!this.connection) {\n                            log('sim-pipe.send: starting connection cause it was null on ', this)\n                            this.connected()\n                        }\n                        // console.log('>> ', this.id, args)\n                        assert(from.pid !== to.pid)\n\n                        args = JSON.parse(JSON.stringify(args))\n                        to.incoming.push([from.pid,\n                                          () => {\n                                              pipes[to.pid + '-' + from.pid].recv(\n                                                  JSON.parse(JSON.stringify(args)))\n                                          },\n                                          'msg_id:' + sim.rand().toString(36).slice(2),\n                                          args.method, JSON.parse(JSON.stringify(args))])\n                    },\n\n                    // The connect 
functions\n                    connect () { this.connected() },\n                    disconnect () { this.disconnected() }\n                })\n\n                from.bind('my_key', pipe)\n            }\n\n            // Create pipes for all the peers\n            for (var p1 = 0; p1 < sim.n_peers; p1++)\n                for (var p2 = p1 + 1; p2 < sim.n_peers; p2++) {\n                    let peer1 = sim.peers[p1],\n                        peer2 = sim.peers[p2]\n                    // Virutal Pipe for A -> B\n                    create_vpipe(peer1, peer2)\n                    // Virtual Pipe for B -> A\n                    create_vpipe(peer2, peer1)\n                }\n        },\n        wrapup (cb) {\n            var sent_joiner = false\n\n            // Connect all the pipes together\n            for (var pipe in this.pipes) {\n                this.pipes[pipe].connected()\n                notes = ['connecting ' + this.pipes[pipe]]\n                sim.vis.add_frame({\n                    t: -1,\n                    peers: sim.peers.map(x => JSON.parse(JSON.stringify(x)))\n                })\n            }\n\n            // Now let all the remaining incoming messages get processed\n            do {\n                sim.peers.forEach(p => {\n                    while (p.incoming.length > 0) {\n                        notes = []\n\n                        // Process the message.\n                        p.incoming.shift()[1]()\n                        // That might have added messages to another peer's queue.\n\n                        sim.vis.add_frame({\n                            peer_notes: {[p.pid]: notes},\n                            peers: sim.peers.map(x => JSON.parse(JSON.stringify(x)))\n                        })\n                    }\n                })\n\n                var more_messages_exist = sim.peers.some(p => p.incoming.length > 0)\n\n                // Once everything's clear, make a joiner\n                if (!more_messages_exist && 
!sent_joiner) {\n                    var i = Math.floor(sim.rand() * sim.n_peers)\n                    var p = sim.peers[i]\n                    \n                    log('creating joiner')\n                    notes = ['creating joiner']\n\n                    // Create it!\n                    p.set('my_key', null, [])\n                    sent_joiner = true\n                    \n                    sim.vis.add_frame({\n                        peer_notes: {[p.pid]: notes},\n                        peers: sim.peers.map(x => JSON.parse(JSON.stringify(x)))\n                    })\n\n                    // That'll make messages exist again\n                    more_messages_exist = true\n                }\n            } while (more_messages_exist)\n            if (cb) cb()\n        },\n\n        receive_message (peer) {\n            var inbox = peer.incoming\n            if (inbox.length > 0) {\n                var possible_peers = {}\n                inbox.forEach(x => possible_peers[x[0]] = true)\n                possible_peers = Object.keys(possible_peers)\n                var chosen_peer = possible_peers[\n                    Math.floor(sim.rand() * possible_peers.length)]\n\n                var msg = inbox.splice(inbox.findIndex(x => x[0] == chosen_peer),\n                                       1)\n                msg[0][1]()\n            }\n        },\n\n        toggle_pipe () {\n            var pipe_keys = Object.keys(this.pipes),\n                random_index = Math.floor(sim.rand() * pipe_keys.length),\n                random_pipe = this.pipes[pipe_keys[random_index]],\n                [pid, other_pid] = pipe_keys[random_index].split('-'),\n                peer = sim.peers_dict[pid],\n                other_pipe = this.pipes[other_pid + '-' + pid],\n                other_peer = sim.peers_dict[other_pid]\n\n            // Toggle the pipe!\n            assert(!!random_pipe.connection === !!other_pipe.connection,\n                   random_pipe.connection, 
other_pipe.connection)\n            if (random_pipe.connection) {\n                random_pipe.disconnected()\n                other_pipe.disconnected()\n\n                peer.incoming = peer.incoming.filter(x => x[0] !== other_pid)\n                other_peer.incoming = other_peer.incoming.filter(x => x[0] !== pid)\n            } else {\n                random_pipe.connected()\n                other_pipe.connected()\n            }\n        }\n    }\n)"
  },
  {
    "path": "kernel/test/websocket-test.js",
    "content": "// Tests for the braid-websocket protocol\n\nmodule.exports = require['websocket-test'] = (sim) => (\n    {\n        name: 'websocket',\n        sync: false,\n        certificate: `-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJANoWGfl3pEeHMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\nBAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\naWRnaXRzIFB0eSBMdGQwHhcNMTkwODE2MjAxNTIxWhcNMjAwODE1MjAxNTIxWjBF\nMQswCQYDVQQGEwJVUzETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\nZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\nCgKCAQEA1bilKJKH1axV0OLLIwg3WxXx6MMsFL3/bv2uX9+Z22uZukJsgqnR2y+6\nOCLH8opczH4Now3Od+P0G4kNSn9m+T5W5bvf9bIIDmCG/04uGCvx0L8bgYA5lyMJ\naFdcfCXu1iKvUt1LdZlds2AsBfceYCB6FwsMkUODzZ7OJ6R1aXUHxQ74me/ksoxV\nP7Fmv012gRJkYn5gzvrokula2Yxb+z84TP115tALYBBpLhj5WPOXSmyVo0Lf1dGQ\nJfbRxvx32pxZiBPwcNre3yzKhRue99tRuPHFCQBZSkXGuT7K9bsNnPwXfAmB2VbQ\nbjezmqVGv8KnwyTRWdLaEcV9cxHCnQIDAQABo1AwTjAdBgNVHQ4EFgQUOoDGcBG8\nXm/Jj+WbIYctxhGqD6owHwYDVR0jBBgwFoAUOoDGcBG8Xm/Jj+WbIYctxhGqD6ow\nDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAaHjdu8Hg34Zzay4djFSo\nhRno4m+tiJ4UT3oLTHRGh54JFKQPeLLEY0WbhrBDyuDJrCdyjvmqpuELPPwNRdo0\nLy3fhRIxeaN8px6V0bpdj0ePDqC0ZU5It/9jVlC0OkdG2xwJygw+xNLaHb09l7rj\nZLM+tOKQEBxZCLKqc1FLlS9MIxDKaVdI2JSBDmNl+0XyFwKM6bfI3Mk8STuZXm5A\nEtWvDNbLFl6TLyKDeHNRc0LQEa74xE3yhoWO3kb9phL4A1g/I7rW+B2we4N84FfT\nv5C5/zn58xabUtMVeGUi/avnVz+C4HY4ZMEIQPIodtsRcZq05RQGW8ipig7QaXnD\ngQ==\n-----END CERTIFICATE-----\n`,\n        private_key: `-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDVuKUokofVrFXQ\n4ssjCDdbFfHowywUvf9u/a5f35nba5m6QmyCqdHbL7o4IsfyilzMfg2jDc534/Qb\niQ1Kf2b5Plblu9/1sggOYIb/Ti4YK/HQvxuBgDmXIwloV1x8Je7WIq9S3Ut1mV2z\nYCwF9x5gIHoXCwyRQ4PNns4npHVpdQfFDviZ7+SyjFU/sWa/TXaBEmRifmDO+uiS\n6VrZjFv7PzhM/XXm0AtgEGkuGPlY85dKbJWjQt/V0ZAl9tHG/HfanFmIE/Bw2t7f\nLMqFG57321G48cUJAFlKRca5Psr1uw2c/Bd8CYHZVtBuN7OapUa/wqfDJNFZ0toR\nxX1zEcKdAgMBAAECggEAWCxLh0ec3tywsvM+V3+mRt/w49TRtOUGIyZp8IfxlAL6\nc0vANNAXElTIgSxoTXoj+wHuYlzp17CmH04Vu6yAMUg01acDKPyAMl5Ek8QPZE2N\nAFA36t+Z4u7DjNauA1IrDRFWP9uorCXP8Jc20mc3kvUTKbqXPr8Z+5UO/G/vOMgc\nQKXPoz45EbFahTwck4TQowLeKhAF3BU5fn48zuBy055q6babV1z0LDzDIUGcZqHv\n4VPMLOUp1KzpwoQd6o3wwBBttJkFqBf7US3nExdq0SkHgwE/lOKgJuSMmgAWgGm5\n3iO8F+Ve84206IgmhQOMw3KZjIgWdiCW/dgVbJQsQQKBgQD4/2Wr4NYfdXqotnjT\nMZCx5921nFwkMyt7JndCIs49CQ3lMGtlijRtVHGhZKVHUZr4SKKfjbOAiABPCsRL\nZhvVnhlbmUioSgfMM/Y+fkCs3DdzuJE9tVuSdyQFoblY5W1dLeuLTEI3TDTos+V7\njfKsHMqF0gAbCkt7GgVpy5vCRQKBgQDbu0ibskjwF5voMuJmJdvIv0XAi91sRTRJ\nRuDrH6NPU+RrVHTHRJMtGRM5zWI4b7N0KTx+J2xaJ6J/FxbfsdThgKb99gB9j3hR\nF0CK/quMjAwpezWwatHarK87c//rvmIBVL82xLe3sQKxmwdCUiyhum/4l+GN+WpZ\nlfP4HU4weQKBgQD18WaekBVPu31tedb8XB/c6fZ/NTN5+iT/ni374F8vwGq+L8ZU\n5F8Ggns+fCgYus1EYpJm4NMlqLANYsgi5Xem12Oaq1wuBfmPxN98OL5vP5FyNyMW\n/bS2hgHJokVuPid4+yuGSsu4zQgRted80+eYA1QzPAsoqlGGBVzFc/yktQKBgQDP\nRcqHPFV7Tfn+vkk8bEf4BR4KNKWJZXqeCONQSEboJM3axQ9njXN73iR5qRkW/Z99\nWwy6P/wAy1SIqEImf3y9v3tHI1BxIO4xKEr1EqjGarFqS9Rod0tACRc/cPwf6DZQ\n5R1+z3AyMiLFYOUnFZcOdGz9RmA5aeZ9XWuHSDWimQKBgGgmRWuGasEEMXdnkLQA\nrNg1Di5DFv+KvXwgTo63MxwBs2olQ7jUsFf8khipqpByGazYgGeEa1RxDGpQrdyO\nI/5N3d5VcGW4g9obfdexuuKOloyKRS2N0KNhLfEfb+qr4gRACPpyKnj5Jeohliox\nbHieUzx8qriZ8KrD3PbjKqap\n-----END PRIVATE KEY-----\n`,\n        setup () {\n            // Make the hub\n            var hub = require('../node.js')()\n            hub.pid = 'hub'\n            sim.add_peer(hub, 0)\n            this.server = require('../websocket-server.js')(\n                hub,\n                this.certificate,\n     
           this.private_key\n            )\n\n            // Make the clients\n            var clients = []\n            for (var i = 1; i < sim.n_peers; i++) {\n                var client = require('../node.js')()\n                client.pid = 'C' + i\n                sim.add_peer(client, i)\n                clients.push(client)\n            }\n\n            // Create pipes that connect peers to the hub\n            this.client_pipes = {}\n            for (var i = 0; i < clients.length; i++)\n                this.client_pipes[clients[i].pid] = require('../websocket-client.js')({\n                    node: clients[i],\n                    url: 'ws://localhost:3007/',\n                    prefix: '*'\n                })\n\n            WebSocket = require('ws')\n        },\n        wrapup (cb) {\n            nlog('Wrapping up!')\n\n            // Connect all the pipes together\n            for (var pipe in this.client_pipes)\n                if (!this.client_pipes[pipe].enabled())\n                    this.client_pipes[pipe].enable()\n\n            // Make a joiner after a delay\n            setTimeout(make_joiner, 100)\n\n            // And be done after another one\n            setTimeout(cb, 200)\n\n            function make_joiner () {\n                var i = Math.floor(sim.rand() * sim.n_peers)\n                var p = sim.peers[i]\n                \n                notes = ['creating joiner']\n\n                // Create it!\n                p.set('my_key', null, [])\n                \n                sim.vis.add_frame({\n                    peer_notes: {[p.pid]: notes},\n                    peers: sim.peers.map(x => JSON.parse(JSON.stringify(x)))\n                })\n            }\n        },\n        die (cb) {\n            // Disable the clients\n            for (var k in this.client_pipes)\n                this.client_pipes[k].disable()\n\n            // Kill the server\n            this.server.dead = true\n            this.server.close(cb)\n        },\n   
     toggle_pipe () {\n            var pipes = Object.keys(this.client_pipes)\n            var rand_pipe = this.client_pipes[\n                pipes[Math.floor(sim.rand() * pipes.length)]]\n            \n            nlog('toggling', rand_pipe.pipe.id, 'to',\n                 rand_pipe.enabled() ? 'disabled':'enabled')\n            if (rand_pipe.enabled())\n                rand_pipe.disable()\n            else\n                rand_pipe.enable()\n        }\n    }\n)\n"
  },
  {
    "path": "kernel/test/wiki-perf.html",
    "content": "<!DOCTYPE html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n<meta charset=\"utf-8\">\n</head>\n<body style=\"margin:0px\"></body>\n<script src=\"../../util/require.js\"></script>\n<script src=\"../../util/diff.js\"></script>\n<script src=\"../../util/utilities.js\"></script>\n<script src=\"../errors.js\"></script>\n<script src=\"../antimatter.js\"></script>\n<script src=\"../node.js\"></script>\n<script src=\"../pipe.js\"></script>\n<script src=\"../store.js\"></script>\n<script src=\"../websocket-client.js\"></script>\n<script src=\"../websocket-server.js\"></script>\n<script src=\"../../sync9/sync9.js\"></script>\n<script>\n\nvar tau = Math.PI * 2\n\ng_log_stuff = false\ng_log_stuff_max_length = 20000 // 150\n\ng_real_clearTimeout = clearTimeout\ng_real_setTimeout = setTimeout\n\ng_got_local_ack = false\ng_got_global_ack = false\n\nasync function main() {\n    var d = make_html(`<div style=\"display:grid;grid-template-columns:200px 1fr;width:100%;height:100%\"></div>`)\n    document.body.append(d)\n\n    var side_panel = make_html(`<div style=\"background:orange\">HELLO</div>`)\n    d.append(side_panel)\n\n    var spark_panel = make_html(`<div style=\"width:100%;position:relative\"></div>`)\n    d.append(spark_panel)\n\n    var stats = {\n        clock: 1,\n        time: 1,\n        full_acks: 1,\n        server_versions: 1,\n        server_fissures: 1,\n        num_active_servers: 10,\n        num_clients: 10\n    }\n    var stat_overlays = {\n        server_fissures: ['server_broken_fissures'],\n        num_clients: ['num_active_clients', 'num_really_active_clients']\n    }\n    var after_bottom_sl_y = 0\n    Object.entries(stats).forEach(([k, v]) => {\n        var sl = create_sparkline(k, spark_panel.offsetWidth, 60, v)\n        spark_panel.append(sl)\n        sl.style.marginBottom = '19px'\n        stats[k] = sl\n        after_bottom_sl_y = sl.offsetTop + sl.offsetHeight\n    })\n\n    var 
message_d = make_html(`<div style=\"background:yellow;height:48px;width:${spark_panel.offsetWidth}px;overflow:scroll\">messages will appear here..</div>`)\n    spark_panel.append(message_d)\n\n    var time_dag_c = make_html(`<canvas></canvas>`)\n    var time_dag_g = time_dag_c.getContext('2d')\n\n    spark_panel.append(time_dag_c)\n\n    var params = {\n        rand_seed: 'hi_world',\n\n        server_off_time: [0, 2000],\n        server_on_time: [0, 10 * 1000],\n\n        max_clients: 5,\n        create_client_period: [200, 700],\n        client_on_time: [0, 3000],\n        client_off_time: [0, 2000],\n        client_death_delay: [1 * 1000, 10 * 1000],\n        client_send_forget_chance: 0.5,\n        client_send_deletes_chance: 0.5,\n        client_active_period: [100, 500],\n        client_idle_period: [100, 500],\n        client_edit_period: [100, 500],\n        max_edit_size: 5,\n\n        socket_up_delay: [25, 150],\n        socket_down_delay: [25, 150],\n        socket_close_delay: [0, 1000],\n\n        pump_delay: 30\n    }\n    Object.assign(params, JSON.parse(localStorage.wiki_perf_li7fl3drudc || '{}'))\n    localStorage.wiki_perf_li7fl3drudc = JSON.stringify(params)\n\n    Object.entries(params).forEach(([k, v]) => {\n        var label = make_html(`<div></div>`)\n        label.textContent = k\n        side_panel.append(label)\n\n        var input = make_html(`<input style=\"width:100%\"></input>`)\n        input.value = v\n        side_panel.append(input)\n        input.oninput = () => {\n            if (Array.isArray(v)) {\n                params[k] = input.value.split(/,/).map(x => 1*x.trim())\n            } else if (typeof(v) == 'number') {\n                params[k] = 1*input.value.trim()\n            } else {\n                params[k] = input.value.trim()\n            }\n            localStorage.wiki_perf_li7fl3drudc = JSON.stringify(params)\n        }\n        input.onchange = () => input.value = params[k]\n    })\n\n    var pump = null\n    var 
pumper = null\n    var pump_timer = null\n\n    var history = []\n    var show_time = -1\n\n    function draw_dags() {\n        var resource = show_time >= 0 ? history[show_time] : history[history.length - 1]\n        if (show_time <= 0) time_line_d.style.left = (history.length - 1) + 'px'\n\n        var c = time_dag_c\n        var g = time_dag_g\n\n        if (!resource) {\n            g.fillStyle = 'rgba(245, 235, 255, 0.8)'\n            g.fillRect(0, 0, c.width, c.height)\n            return\n        }\n\n        message_d.textContent = `i=${show_time}, ` + resource.message + ' -- ' + JSON.stringify(resource.acked_boundary)\n\n        var size = calc_time_dag_size(resource.time_dag, 18)\n\n        c.width = spark_panel.offsetWidth * devicePixelRatio\n        c.height = Math.max(size.h + 50, c.height)\n        c.style.width = (c.width / devicePixelRatio) + 'px'\n        c.style.height = (c.height / devicePixelRatio) + 'px'\n\n        g.fillStyle = 'rgba(245, 235, 255, 0.8)'\n        g.fillRect(0, 0, c.width, c.height)\n\n        draw_time_dag(c, g, resource.time_dag, resource.acked_boundary, resource.acks_in_process, resource.fissures, 0, 0, c.width/2, c.height, 18)\n\n        try {\n            var S = resource.space_dag.S.elems[0].S.text.S.elems[0]\n            draw_space_dag(c, g, S, c.width/2, 0)\n        } catch (e) {}\n    }\n\n    function on_restart() {\n        if (pump_timer) g_real_clearTimeout(pump_timer)\n        pump = create_experiment(params)\n        pumper = () => {\n            var results = pump()\n\n            var dx = 0\n            Object.entries(stats).forEach(([k, sl]) => {\n                var args = [results[k]]\n                if (stat_overlays[k]) stat_overlays[k].forEach(k => args.push(results[k]))\n                dx = sl.update(...args)\n            })\n\n            if (results.server_res) results.server_res.message = results.message\n            history.push(results.server_res || null)\n            if (dx < 0) for (var i = 0; i 
> dx; i--) history.shift()\n            if (dx < 0 && show_time >= 0) update_time(show_time + dx)\n            draw_dags()\n\n            pump_timer = g_real_setTimeout(pumper, params.pump_delay)\n        }\n        pumper()\n    }\n\n    function on_resume() {\n        if (!pump) on_restart()\n        else if (!pump_timer) pump_timer = g_real_setTimeout(pumper, params.pump_delay)\n    }\n\n    function on_pause() {\n        if (pump_timer) g_real_clearTimeout(pump_timer)\n        pump_timer = null\n    }\n\n    var go_button = make_html(`<button style=\"width:100%;height:60px;margin-top:18px\">GO</button>`)\n    side_panel.append(go_button)\n    go_button.onclick = () => {\n        if (pump_timer) {\n            on_pause()\n            go_button.textContent = 'RESUME'\n        } else {\n            on_resume()\n            go_button.textContent = 'PAUSE'\n        }\n    }\n\n    var restart_button = make_html(`<button style=\"width:100%;height:60px;margin-top:18px\">RESTART</button>`)\n    side_panel.append(restart_button)\n    restart_button.onclick = () => {\n        on_restart()\n        go_button.textContent = 'PAUSE'\n    }\n\n    var time_line_d = make_html(`<div style=\"width:${1}px;height:${after_bottom_sl_y}px;background:red;position:absolute;left:0px;top:0px\"></div>`)\n    spark_panel.append(time_line_d)\n    spark_panel.onmousemove = spark_panel.onmousedown = e => {\n        var r = spark_panel.getBoundingClientRect()\n        var x = e.clientX - r.left\n        if (e.buttons) update_time(x)\n    }\n\n    function update_time(t) {\n        time_line_d.style.left = t + 'px'\n        show_time = t\n        draw_dags()\n    }\n\n    var follow_button = make_html(`<button style=\"width:100%;height:60px;margin-top:18px\">FOLLOW ALONG</button>`)\n    side_panel.append(follow_button)\n    follow_button.onclick = () => {\n        console.log('got here??')\n        show_time = -1\n        draw_dags()\n    }\n}\n\nrequire('../util/utilities.js')\nvar ds = 
require('../util/diff.js')\n\nvar page_key = '/foo'\n\nvar next_msg_id = 0\n\ndebug_WSS = function () {\n    return debug_WSS.the_one = {\n        on_conns: [],\n        on(event_type, func) {\n            if (event_type == 'connection') this.on_conns.push(func)\n            else throw 'bad'\n        },\n        ws_array: [],\n        close() {\n            this.ws_array.slice(0).forEach(ws => ws.terminate())\n            debug_WSS.the_one = null\n        }\n    }\n}\n\ndebug_WS = function (id, socket_up_delay, socket_down_delay, socket_close_delay) {\n    var prev_up = 0\n    var prev_down = 0\n\n    var self = {\n        id,\n        on_messages: [],\n        on_closes: [],\n        is_open: true,\n        send(msg) {\n            var t = Date.now()\n            var d = rand_range(socket_up_delay)\n            if (t + d < prev_up) d = prev_up + 0.1 - t\n\n            var msg_id = 'MSSSSG::' + next_msg_id++ // Math.random().toString(36).slice(2)\n            g_log_stuff && console.log(`client send C-${id} (t=${t}:t2=${t + d}:d=${d}:id=${msg_id}): ` + msg.slice(0, g_log_stuff_max_length))\n\n\n            setTimeout(() => {\n                if (!self.is_open) { return }\n\n                g_log_stuff && console.log(`server recv C-${id} (t=${Date.now()}:id=${msg_id}): ` + msg.slice(0, g_log_stuff_max_length))\n\n                this.on_messages.forEach(f => f(msg))\n            }, d, 'server receive: ' + msg)\n\n            prev_up = t + d\n        },\n        terminate() {\n            if (!self.is_open) { return }\n            self.is_open = false\n\n            var msg_id = 'MSSSSG::' + next_msg_id++\n            g_log_stuff && console.log(`socket terminated part 1 for C-${id} (id=${msg_id})`)\n\n            setTimeout(() => {\n\n                g_log_stuff && console.log(`socket terminated part 2 for C-${id} (id=${msg_id})`)\n    \n                this.onclose && this.onclose()\n                this.on_closes.forEach(f => f())\n                this.on_closes = 
[]\n                this.on_messages = []\n                if (debug_WSS.the_one) {\n                    let i = debug_WSS.the_one.ws_array.indexOf(self)\n                    if (i >= 0) debug_WSS.the_one.ws_array.splice(i, 1)\n                }    \n            }, rand_range(socket_close_delay), 'websocket terminate')\n        }\n    }\n    self.close = self.terminate\n\n    setTimeout(() => {\n        if (debug_WSS.the_one) {\n            debug_WSS.the_one.ws_array.push(self)\n            debug_WSS.the_one.on_conns.forEach(f => {\n                f({\n                    on(event_type, func) {\n                        if (event_type == 'message') self.on_messages.push(func)\n                        else if (event_type == 'close') self.on_closes.push(func)\n                        else throw 'unknown event_type: ' + event_type\n                    },\n                    send(msg) {\n                        var t = Date.now()\n                        var d = rand_range(socket_down_delay)\n                        if (t + d < prev_down) d = prev_down + 0.1 - t\n\n                        var msg_id = 'MSSSSG::' + next_msg_id++ // Math.random().toString(36).slice(2)\n\n                        g_log_stuff && console.log(`server send C-${id} (t=${t}:t2=${t + d}:d=${d}:id=${msg_id}): ` + msg.slice(0, g_log_stuff_max_length))\n\n                        setTimeout(() => {\n                            if (!self.is_open) { return }\n\n                            g_log_stuff && console.log(`client recv C-${id} (t=${Date.now()}:id=${msg_id}): ` + msg.slice(0, g_log_stuff_max_length))\n\n                            self.onmessage({data: msg})\n                        }, d, 'client receive: ' + msg)\n\n                        prev_down = t + d\n                    },\n                    terminate() { self.terminate() }\n                }, {socket: {remoteAddress: 'tester.fake.ip.address'}})\n            })\n            self.onopen && self.onopen()\n        } else {\n            
self.terminate()\n        }\n    }, 0, 'new websocket')\n\n    return self\n}\n\ng_profile = {\n    keys: {},\n    begin(key) {\n        if (!this.keys[key]) this.keys[key] = {count: 0, time: 0}\n        if (this.keys[key].begin != null) throw 'unbalanced begin! key: ' + key\n        this.keys[key].begin = performance.now()\n    },\n    end(key) {\n        if (!this.keys[key]) throw 'unbalanced end! key: ' + key\n        this.keys[key].time += performance.now() - this.keys[key].begin\n        delete this.keys[key].begin\n        this.keys[key].count++\n    },\n    mark(key) {\n        if (!this.keys[key] || this.keys[key].begin == null) this.begin(key)\n        else this.end(key)\n    },\n    print() {\n        Object.entries(this.keys).forEach(([k, v]) => {\n            console.log(`${k}\\t${v.time / v.count}\\t${v.time}\\t${v.count}`)\n        })\n    }\n}\n\nfunction create_experiment(params) {\n    Math.randomSeed(params.rand_seed)\n\n    debug_WSS.the_one = null\n\n    var db = create_db()\n    var server = create_server(db)\n    var clients = []\n\n    var t = 0\n    Date.now = () => t\n\n    var events = []\n    var next_timer_id = 1\n    setTimeout = (f, tt, msg) => {\n        var id = next_timer_id++\n        events.push([t + tt, f, id, msg])\n        return id\n    }\n\n    clearTimeout = (id) => {\n        events = events.filter(x => x[2] != id)\n    }\n\n    create_client_cron()\n    function create_client_cron() {\n        setTimeout(() => {\n            if (clients.length < params.max_clients) {\n                var c = create_client(params)\n                clients.push(c)\n\n                g_log_stuff && console.log(`created client (t=${Date.now()}): C-` + c.id)\n\n                set_on_timeout()\n                function set_on_timeout() {\n                    setTimeout(() => {\n                        if (clients.indexOf(c) >= 0) {\n                            if (!c.is_open) throw 'bad'\n\n                            g_log_stuff && 
console.log(`closing client (t=${Date.now()}): C-` + c.id)\n\n                            c.close(false, false)\n                            setTimeout(() => {\n                                if (clients.indexOf(c) >= 0) {\n                                    if (c.is_open) throw 'bad'\n\n                                    g_log_stuff && console.log(`openning client (t=${Date.now()}): C-` + c.id)\n\n                                    c.open()\n                                    set_on_timeout()\n                                }\n                            }, rand_range(params.client_off_time), 'open client')\n                        }\n                    }, rand_range(params.client_on_time), 'close client')\n                }\n\n                setTimeout(() => {\n\n                    g_log_stuff && console.log(`killing client: (t=${Date.now()}) C-` + c.id)\n\n                    var ci = clients.indexOf(c)\n                    if (ci < 0) throw 'bad'\n                    if (c.is_open) {\n                        var send_forget = Math.random() < params.client_send_forget_chance\n                        var send_deletes = send_forget || Math.random() < params.client_send_deletes_chance\n                        c.close(send_deletes, send_forget)\n                    }\n                    clients.splice(ci, 1)\n                }, rand_range(params.client_death_delay), 'kill client')\n\n                c.active = Math.random() < 0.5\n                set_active_timeout()\n                function set_active_timeout() {\n                    if (c.active) {\n                        setTimeout(() => {\n                            c.active = false\n                            set_active_timeout()\n                        }, rand_range(params.client_active_period), 'client idle')\n                    } else {\n                        setTimeout(() => {\n                            c.active = true\n                            set_active_timeout()\n                   
     }, rand_range(params.client_idle_period), 'client active')\n                    }\n                }\n\n                set_edit_timeout()\n                function set_edit_timeout() {\n                    setTimeout(() => {\n                        if (clients.indexOf(c) >= 0) {\n                            if (c.active) {\n                                g_log_stuff && console.log(`editing: (t=${Date.now()}) C-` + c.id)\n\n                                let text = c.get()\n                                let start = Math.floor(Math.random() * (text.length + 1))\n\n                                let max_len = text.length - start + 1\n                                if (max_len > params.max_edit_size) max_len = params.max_edit_size\n                                let len = Math.floor(Math.random() * max_len)\n\n                                let ins_size = Math.floor(Math.random() * params.max_edit_size)\n                                if (ins_size == 0 && len == 0) ins_size = 1\n                                let ins = String.fromCharCode(65 + Math.floor(Math.random() * 26)).repeat(ins_size)\n\n                                c.set(start, len, ins)\n                            }\n                            set_edit_timeout()\n                        }\n                    }, rand_range(params.client_edit_period), 'client edit')\n                }                    \n            }\n            create_client_cron()\n        }, rand_range(params.create_client_period), 'create client')\n    }\n\n    toggle_server_cron()\n    function toggle_server_cron() {\n        if (server) {\n            setTimeout(() => {\n\n                g_log_stuff && console.log(`closing server (t=${Date.now()})`)\n\n                server.close()\n                server = null\n                toggle_server_cron()\n            }, rand_range(params.server_on_time), 'close server')\n        } else {\n            setTimeout(() => {\n\n                g_log_stuff && 
console.log(`opening server (t=${Date.now()})`)\n\n                server = create_server(db)\n                toggle_server_cron()\n            }, rand_range(params.server_off_time), 'open server')\n        }\n    }\n\n    var prev_server_versions = 0\n    var prev_server_fissed_versions = 0\n    var prev_server_fissures = 0\n    var prev_server_broken_fissures = 0\n\n    return () => {\n        var e = events.sort((a, b) => a[0] - b[0]).shift()\n        t = e[0]\n\n        var st = performance.now()\n        e[1]()\n        var et = performance.now()\n\n        if (server) {\n            prev_server_versions = Object.keys(server.node.resource_at(page_key).time_dag).length\n\n            let versions = Object.fromEntries(Object.keys(server.node.resource_at(page_key).time_dag).map(x => [x, true]))\n            let fissed_versions = {}\n            Object.values(server.node.resource_at(page_key).fissures).forEach(f => {\n                Object.keys(f.versions).forEach(v => {\n                    if (versions[v]) fissed_versions[v] = true\n                })\n            })\n            prev_server_fissed_versions = Object.keys(fissed_versions).length\n\n            let fissures = server.node.resource_at(page_key).fissures\n\n            prev_server_fissures = Object.keys(fissures).length\n\n            prev_server_broken_fissures = Object.values(fissures).filter(f => !fissures[f.b + ':' + f.a + ':' + f.conn]).length\n        }\n\n        var full_acks = g_got_global_ack ? 1 : g_got_local_ack ? 
0.5 : 0\n        g_got_local_ack = false\n        g_got_global_ack = false\n\n        return {\n            message: e[3],\n            clock: (t % 1000)/1000,\n            time: et - st,\n            full_acks,\n            server_versions: prev_server_versions,\n            server_fissed_versions: prev_server_fissed_versions,\n            server_fissures: prev_server_fissures,\n            server_broken_fissures: prev_server_broken_fissures,\n            num_active_servers: server ? 1 : 0,\n            num_clients: clients.length,\n            num_active_clients: clients.filter(x => x.is_open).length,\n            num_really_active_clients: clients.filter(x => x.active).length,\n\n            server,\n            clients,\n            server_res: server && JSON.parse(JSON.stringify(server.node.resource_at(page_key)))\n        }\n    }\n}\n\nfunction create_db() {\n    return g_db = {\n        data: {},\n        get(key) {\n            return this.data[key]\n        },\n        set(key, val) {\n            this.data[key] = val\n        },\n        del(key) {\n            delete this.data[key]\n        },\n        list_keys() {\n            return Object.keys(this.data)\n        }\n    }\n}\n\nfunction create_server(db) {\n    var node = require('../braid.js')()\n    node.fissure_lifetime = 1000 * 1000\n    // node.max_fissures = 10\n    require('../util/store.js')(node, db)\n\n    node.on_errors.push((key, origin) => {\n        node.unbind(key, origin)\n    })\n\n    node.ons.push((type, args) => {\n        if (type == 'ack') {\n            if (args.seen == 'local') g_got_local_ack = true\n            else g_got_global_ack = true\n        }\n    })\n\n    var wss = require('../protocol-websocket/websocket-server.js')(node, {wss: new debug_WSS()})\n\n    return {\n        node,\n        get() {\n            var o = node.resource_at(page_key).mergeable.read()\n            return o && o.text\n        },\n        close() {\n            wss.dead = true\n            
wss.close()\n        }\n    }\n}\n\nfunction create_client(params) {\n    var node = require('../braid.js')()\n    node.default(page_key, {cursors: {[node.pid]: {start: 0, end: 0, time: Date.now()}}, text: ''})\n    var ws_client = require('../protocol-websocket/websocket-client.js')({node, create_websocket: () => {\n        return debug_WS(node.pid, params.socket_up_delay, params.socket_down_delay, params.socket_close_delay)\n    }})\n\n    var cursor_lifetime = 10000\n\n    var ready = false\n    var text = ''\n    var selectionStart = 0\n    var selectionEnd = 0\n\n    function send_diff(from, to) {\n        var v = node.set(page_key, null, ds.diff_convert_to_my_format(ds.diff_main(from, to)).map(x =>\n            `.text[${x[0]}:${x[0] + x[1]}] = ${JSON.stringify(x[2])}`\n        ))\n    }\n\n    function send_cursor_update(start, end) {\n        node.set(page_key, null, [`.cursors[${JSON.stringify(node.pid)}] = ${JSON.stringify({start: {type: 'location', path: `.text[${start}]`}, end: {type: 'location', path: `.text[${end}]`}, time: Date.now()})}`])\n    }\n\n    var cb = x => {\n        ready = true\n        text = x.text\n        if (x.cursors[node.pid]) {\n            selectionStart = x.cursors[node.pid].start\n            selectionEnd = x.cursors[node.pid].end\n        }\n    }\n    node.get(page_key, cb)\n\n    node.ons.push((method, arg) => {\n        if (method != 'welcome' && method != 'fissure') return\n        if (arg.key != page_key) return\n\n        var fs = {}\n        if (method == 'welcome') {\n            for (let f of arg.fissures)\n                fs[`${f.a}:${f.b}:${f.conn}`] = f\n        } else {\n            let f = arg.fissure\n            fs[`${f.a}:${f.b}:${f.conn}`] = f\n        }\n\n        var rest = () => {\n            var o = node.resource_at(page_key).mergeable.read()\n            if (!o || !o.cursors) return\n\n            Object.assign(fs, node.resource_at(page_key).fissures)\n    \n            var delete_us = {}\n            
Object.values(fs).forEach(f => {\n                if (!fs[`${f.b}:${f.a}:${f.conn}`]) {\n                    if (o.cursors[f.b]) delete_us[f.b] = true\n                }\n            })\n\n            var now = Date.now()\n            Object.entries(o.cursors).forEach(([k, v]) => {\n                if (k != node.pid && v.time <= now - cursor_lifetime) delete_us[k] = true\n            })\n\n            var patches = Object.keys(delete_us).map(k => `delete .cursors[${JSON.stringify(k)}]`)\n            if (patches.length) node.set(page_key, null, patches)\n        }\n        setTimeout(rest, 0, 'node ' + method + ':' + JSON.stringify(arg))\n    })\n\n    node.on_errors.push((key, origin) => {\n        // console.log('CLIENT ON ERROR')\n\n        text = ''\n        selectionStart = 0\n        selectionEnd = 0\n\n        ready = false\n\n        delete node.resources[key]\n        node.unbind(key, origin)\n\n        var subscribe = ws_client.pipe.subscribed_keys[key].we_requested\n        delete ws_client.pipe.subscribed_keys[key].we_requested\n\n        ws_client.pipe.send({\n            key,\n            subscribe,\n            method: 'get'\n        })\n    })\n\n    var self\n    return self = {\n        id: node.pid,\n        node,\n        is_open: true,\n        get: () => {\n            return text\n        },\n        set: (x, del, ins) => {\n            if (!ready) return\n            var new_text = text.slice(0, x) + ins + text.slice(x + del)\n            send_diff(text, new_text)\n            if (x + ins.length <= new_text.length)\n                send_cursor_update(x + ins.length, x + ins.length)\n            else\n                send_cursor_update(new_text.length, new_text.length)\n        },\n        close: (send_deletes, send_forget) => {\n            if (ready && send_deletes) node.set(page_key, null, [`delete .cursors[${JSON.stringify(node.pid)}]`])\n            if (send_forget) node.forget(page_key, cb)\n            ws_client.disable()\n            
self.is_open = false\n        },\n        open: () => {\n            ws_client.enable()\n            self.is_open = true\n        }\n    }    \n}\n\nfunction create_sparkline(text, w, h, max_y) {\n    var d = make_html(`<div></div>`)\n\n    var label = make_html(`<div></div>`)\n    d.append(label)\n    function update_label() { label.textContent = text + ', max_y = ' + max_y }\n    update_label()\n\n    var dd = make_html(`<div style=\"position:relative;width:${w}px;height:${h}px\"></div>`)\n    d.append(dd)\n    \n    if (true) {\n        let back = make_html(`<canvas style=\"width:${w}px;height:${h}px;position:absolute;left:0px;top:0px\"></canvas>`)\n        back.width = w * devicePixelRatio\n        back.height = h * devicePixelRatio\n        dd.append(back)\n        \n        let g = back.getContext('2d')\n        let N = 10\n        for (var i = 0; i <= N; i++) {\n            g.fillStyle = 'rgba(0, 0, 0, 0.25)'\n            g.fillRect(0, back.height - 1 - Math.round(back.height / N * i) + (i == N ? 
1 : 0), back.width, 1)\n        }\n    }\n    \n    var front = make_html(`<canvas style=\"width:${w}px;height:${h}px;position:absolute;left:0px;top:0px\"></canvas>`)\n    front.width = w\n    front.height = h * devicePixelRatio\n    dd.append(front)\n    \n    var front_copy = make_html(`<canvas></canvas>`)\n    front_copy.width = front.width\n    front_copy.height = front.height\n    \n    var x = 0\n    \n    d.update = (...args) => {\n        var g = front.getContext('2d')\n\n        args.forEach((y, i) => {\n            while (y > max_y) {\n                let gg = front_copy.getContext('2d')\n                gg.clearRect(0, 0, front.width, front.height)\n                gg.drawImage(front, 0, 0)\n                g.clearRect(0, 0, front.width, front.height)\n                g.drawImage(front_copy, 0, front.height / 2, front.width, front.height / 2)\n                \n                max_y *= 2\n                update_label()\n            }\n            \n            g.fillStyle = ['rgba(255, 128, 0, 0.5)', 'rgba(255, 0, 255, 0.5)', 'rgba(0, 0, 255, 0.5)'][i % 3]\n            \n            var h = lerp(0, 0, max_y, front.height, y)\n            g.fillRect(x, front.height - h, 1, h)\n        })\n\n        x++\n\n        if (x >= front.width) {\n            let gg = front_copy.getContext('2d')\n            gg.clearRect(0, 0, front.width, front.height)\n            gg.drawImage(front, 0, 0)\n            g.clearRect(0, 0, front.width, front.height)\n            g.drawImage(front_copy, -1, 0, front.width, front.height)\n            x--\n            return -1\n        }\n        return 0\n    }\n    \n    return d\n}\n\nfunction make_html(html) {\n    var d = document.createElement('div')\n    d.innerHTML = html\n    return d.firstChild\n}\n\nfunction rand_range(a) {\n    return lerp(0, a[0], 1, a[1], Math.random())\n}\n\nfunction lerp(t0, v0, t1, v1, t) { return (t - t0) * (v1 - v0) / (t1 - t0) + v0 }\n\nfunction calc_time_dag_size(T, r) {\n    var vs = {}\n    
function get_layer(v) {\n        if (!vs[v]) vs[v] = {vid: v}\n        if (vs[v].layer) return vs[v].layer\n        return vs[v].layer = Object.keys(T[v]).reduce((x, p) => {\n            return Math.max(x, get_layer(p) + 1)\n        }, 0)\n    }\n    Object.keys(T).forEach(get_layer)\n    \n    var layer_members = {}\n    var num_layers = 0\n    Object.values(vs).forEach(v => {\n        layer_members[v.layer] = layer_members[v.layer] || []\n        layer_members[v.layer].push(v.vid)\n        \n        if (v.layer >= num_layers) num_layers = v.layer + 1\n    })\n    \n    Object.values(layer_members).forEach(layer => {\n        layer.sort().forEach((v, i) => {\n            vs[v].layer_i = i\n        })\n    })\n\n    var max_x = 0\n    var max_y = 0\n    Object.values(vs).forEach(v => {\n        max_x = Math.max(max_x, v.layer_i + 1)\n        max_y = Math.max(max_y, r + (v.layer * r*3))\n    })\n    return {w: max_x, h: max_y}\n}\n\nfunction draw_time_dag(c, g, T, ack_leaves, acks_in_process, fissures, x, y, w, h, r) {\n    g.lineWidth = 3\n    \n    var vs = {}\n    function get_layer(v) {\n        if (!vs[v]) vs[v] = {vid: v}\n        if (vs[v].layer) return vs[v].layer\n        return vs[v].layer = Object.keys(T[v]).reduce((x, p) => {\n            return Math.max(x, get_layer(p) + 1)\n        }, 0)\n    }\n    Object.keys(T).forEach(get_layer)\n    \n    var layer_members = {}\n    var num_layers = 0\n    Object.values(vs).forEach(v => {\n        layer_members[v.layer] = layer_members[v.layer] || []\n        layer_members[v.layer].push(v.vid)\n        \n        if (v.layer >= num_layers) num_layers = v.layer + 1\n    })\n    \n    Object.values(layer_members).forEach(layer => {\n        layer.sort().forEach((v, i) => {\n            vs[v].layer_i = i\n        })\n    })\n\n    function get_node_pos(v) {\n        var layer_count = layer_members[v.layer].length\n        return [\n            lerp(0, x + r, layer_count + 1, x + w - r, v.layer_i + 1),\n            y + 
r + (v.layer * r*3)\n        ]\n    }\n\n    Object.entries(vs).forEach(e => {\n        var a_pos = get_node_pos(e[1])\n        g.beginPath()\n        Object.keys(T[e[0]]).forEach(p => {\n            g.moveTo(a_pos[0], a_pos[1])\n            \n            var b_pos = get_node_pos(vs[p])\n            g.lineTo(b_pos[0], b_pos[1])\n        })\n        g.strokeStyle = 'lightblue'\n        g.stroke()\n    })\n    \n    var fully_acked = {}\n    function mark_fully_acked_rec(v) {\n        if (!fully_acked[v]) {\n            fully_acked[v] = true\n            Object.keys(T[v]).forEach(mark_fully_acked_rec)\n        }\n    }\n    Object.keys(ack_leaves).forEach(mark_fully_acked_rec)\n    \n    Object.entries(vs).forEach(e => {\n        var node_pos = get_node_pos(e[1])\n        \n        g.beginPath()\n        g.arc(node_pos[0], node_pos[1], r, 0, tau)\n        g.fillStyle = 'white'\n        g.fill()\n\n\n\n\n        \n        // if (acks_in_process[e[0]]) {\n        //     var current_count = Math.max(0, acks_in_process[e[0]].count)\n        //     var max_count = 0\n        //     var search_i = fi\n        //     try {\n        //         let x = null\n        //         while (x = frames[search_i].peers[pi].keys.my_key.phase_one[e[0]]) {\n        //             max_count = x.count\n        //             search_i--\n        //         }\n        //     } catch (e) {}\n            \n        //     var percent_done = (max_count - current_count) / max_count\n        //     if (percent_done > 0) {\n        //         g.beginPath()\n        //         g.arc(node_pos[0], node_pos[1], r, 0, tau/2, true)\n        //         if (percent_done == 1) {\n        //             g.arc(node_pos[0], node_pos[1], r, tau/2, 0, true)\n        //         } else if (percent_done < 0.5) {\n        //             var x = lerp(0, r, 0.5, 0, percent_done)\n        //             var C = (r*r - x*x) / (2*x)\n        //             var angle = Math.atan2(r, C)\n        //             
g.arc(node_pos[0], node_pos[1] + C, C + x, tau*3/4 - angle, tau*3/4 + angle)\n        //         } else if (percent_done > 0.5) {\n        //             var x = lerp(0.5, 0, 1, r, percent_done)\n        //             var C = (r*r - x*x) / (2*x)\n        //             var angle = Math.atan2(r, C)\n        //             g.arc(node_pos[0], node_pos[1] - C, C + x, tau/4 - angle, tau/4 + angle)\n        //         } else {\n        //             g.arc(node_pos[0], node_pos[1] + C, C + x, 0, tau)\n        //         }\n        //         g.fillStyle = 'lightblue'\n        //         g.fill()\n        //     }\n        // }\n\n\n\n\n        \n        g.beginPath()\n        g.arc(node_pos[0], node_pos[1], r, 0, tau)\n        if (fully_acked[e[0]]) {\n            g.fillStyle = 'blue'\n            g.fill()\n        } else {\n            g.strokeStyle = 'blue'\n            g.stroke()\n        }\n        \n        draw_text(c, g, e[0].slice(0, 3), node_pos[0] + r, node_pos[1] + r, 'grey', 'left', 'top')\n    })\n    \n    Object.values(fissures).forEach(f => {\n        Object.keys(f.versions).forEach(v => {\n            if (!T[v]) return\n            g.beginPath()\n            \n            var rand = Math.create_rand(f.conn)\n            g.strokeStyle = '#' + rand().toString(16).slice(2, 8)\n            \n            var node_pos = get_node_pos(vs[v])\n            //var rr = r * 1.45\n            var rr = r * (1.2 + rand())\n            \n            g.lineWidth = 5\n            if (f.a < f.b) {\n                g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4)\n            } else {\n                g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4, true)\n            }\n            g.stroke()\n        })\n    })\n    \n}\n\nfunction draw_space_dag(c, g, S, x, y) {\n    function helper(node, y, px, py) {\n        g.beginPath()\n        g.moveTo(x, y)\n        g.lineTo(px, py)\n        g.lineWidth = 1\n        g.strokeStyle = 'lightblue'\n        g.stroke()\n\n        
var begin_x\n        var end_x\n        \n        draw_text(c, g, node.version ? node.version.slice(0, 3) : '', x, y + 25, 'grey', 'left', 'middle')\n        \n        var my_text = node.elems + (node.end_cap ? '*' : '')\n\n        draw_text(c, g, my_text, x, y, Object.keys(node.deleted_by).length > 0 ? 'red' : 'blue', 'left', 'middle', '20px Arial')\n        \n        var width = g.measureText(my_text).width\n        x += width\n\n        var px = x\n        x += 10\n        for (var n of node.nexts) helper(n, y + 40, px, y)\n        if (node.next) helper(node.next, y, px, y)\n    }\n    if (!S) return\n    else if (typeof(S) == 'string') helper({\n        version : 'N/A',\n        elems : S,\n        deleted_by : {},\n        nexts : []\n    })\n    else if (S.t == 'lit') helper({\n        version : 'N/A',\n        elems : S.S,\n        deleted_by : {},\n        nexts : []\n    })\n    else helper(S.S, y, x, y)\n}\n\nfunction draw_text(c, g, text, x, y, color, x_align, y_align, font) {\n    g.font = font || '15px Arial'\n    if (color) g.fillStyle = color\n    g.textAlign = x_align || 'left'\n    g.textBaseline = y_align || 'middle'\n    g.fillText(text, x, y)\n}\n\nmain()\n\n</script>\n</html>\n"
  },
  {
    "path": "kernel/test/wiki-tester.js",
    "content": "\nrequire('../../util/utilities.js')\n\nvar page_key = '/foo'\ng_current_server = null\n\ng_debug_WS_messages = []\ng_debug_WS_messages_delayed = []\ndebug_WS_process_messages = function () {\n    while (g_debug_WS_messages.length) {\n        g_debug_WS_messages.shift()()\n    }\n    g_debug_WS_messages = g_debug_WS_messages_delayed\n    g_debug_WS_messages_delayed = []\n}\n\ndebug_WSS = function () {\n    return debug_WSS.the_one = {\n        on_conns: [],\n        on(event_type, func) {\n            if (event_type == 'connection') this.on_conns.push(func)\n            else throw 'bad'\n        },\n        ws_array: [],\n        close() {\n            this.ws_array.forEach(ws =>\n                g_debug_WS_messages.push(() => {\n\n                    // console.log(`SERVER CLOSING C-${ws.id}`)\n\n                    ws.onclose && ws.onclose()\n                }))\n            debug_WSS.the_one = null\n        }\n    }\n}\n\ndebug_WS = function (id) {\n\n    // console.log(`C-${id} ATTEMPTING CONNECTING TO SERVER`)\n\n    var self = {\n        id,\n        on_messages: [],\n        on_closes: [],\n        is_open: true,\n        send(msg) {\n\n            // var m = JSON.parse(msg)\n            // console.log(`C-${self.id} SEND: ` + m.method + ' ' + (m.seen || ''))\n            // if (m.versions) console.log('versions: ', m.versions)\n            // if (m.patches) console.log('version: ', m.version, m.parents, m.patches)\n\n            // console.log(`C-${self.id} SEND: ` + JSON.stringify(JSON.parse(msg), null, '    '))\n\n            this.on_messages.forEach(f =>\n                g_debug_WS_messages.push(() => {\n\n                    // console.log(`S RECV from:C-${self.id} : ` + m.method + ' ' + (m.seen || ''))\n                    // if (m.versions) console.log('versions: ', m.versions)\n                    // if (m.patches) console.log('version: ', m.version, m.parents, m.patches)\n\n                    // if (!self.is_open) console.log('NOT 
OPEN!')\n                    // console.log(`S RECV from:C-${self.id} : ` + JSON.stringify(JSON.parse(msg), null, '    '))\n        \n                    f(msg)\n                }))\n        },\n        terminate() {\n            if (!self.is_open) throw 'closing closed socket'\n            self.is_open = false\n\n            // console.log(`CLOSING C-${self.id}`)\n\n            g_debug_WS_messages.push(() =>\n                this.onclose && this.onclose())\n            this.on_closes.forEach(f =>\n                g_debug_WS_messages.push(() => f()))\n            this.on_closes = []\n            this.on_messages = []\n            if (debug_WSS.the_one)\n                debug_WSS.the_one.ws_array.splice(debug_WSS.the_one.ws_array.indexOf(self), 1)\n        }\n    }\n    self.close = self.terminate\n    g_debug_WS_messages.push(() => {\n        if (debug_WSS.the_one) {\n            debug_WSS.the_one.ws_array.push(self)\n            debug_WSS.the_one.on_conns.forEach(f => {\n\n                // console.log(`C-${self.id} CONNECTING TO SERVER`)\n\n                f({\n                    on(event_type, func) {\n                        if (event_type == 'message') self.on_messages.push(func)\n                        else if (event_type == 'close') self.on_closes.push(func)\n                    },\n                    send(msg) {\n\n                        // var m = JSON.parse(msg)\n                        // console.log(`S SEND to:C-${self.id} : ` + m.method + ' ' + (m.seen || ''))\n                        // if (m.versions) console.log('versions: ', m.versions)\n                        // if (m.patches) console.log('version: ', m.version, m.parents, m.patches)\n\n                        // console.log(`S SEND to:C-${self.id} : ` + JSON.stringify(JSON.parse(msg), null, '    '))\n    \n                        g_debug_WS_messages.push(() => {\n\n                            // console.log(`C-${self.id} RECV: ` + m.method + ' ' + (m.seen || ''))\n                           
 // if (m.versions) console.log('versions: ', m.versions)\n                            // if (m.patches) console.log('version: ', m.version, m.parents, m.patches)\n\n                            // if (!self.is_open) console.log('NOT OPEN!')\n                            // console.log(`C-${self.id} RECV: ` + JSON.stringify(JSON.parse(msg), null, '    '))\n        \n                            self.onmessage({data: msg})\n                        })\n                    }\n                }, {socket: {remoteAddress: 'fake-ip-address'}})\n            })\n            self.onopen && self.onopen()\n        } else {\n            self.onclose && self.onclose()\n        }\n    })\n    return self\n}\n\nvar ds = require('../../util/diff.js')\nvar performance = require('perf_hooks').performance\n\n\n\ng_profile = {\n    keys: {},\n    begin(key) {\n        if (!this.keys[key]) this.keys[key] = {count: 0, time: 0}\n        if (this.keys[key].begin != null) throw 'unbalanced begin! key: ' + key\n        this.keys[key].begin = performance.now()\n    },\n    end(key) {\n        if (!this.keys[key]) throw 'unbalanced end! 
key: ' + key\n        this.keys[key].time += performance.now() - this.keys[key].begin\n        delete this.keys[key].begin\n        this.keys[key].count++\n    },\n    mark(key) {\n        if (!this.keys[key] || this.keys[key].begin == null) this.begin(key)\n        else this.end(key)\n    },\n    print() {\n        Object.entries(this.keys).forEach(([k, v]) => {\n            console.log(`${k}\\t${v.time / v.count}\\t${v.time}\\t${v.count}`)\n        })\n    }\n}\n\ng_prune_counter = 0\ng_prune_period = 0\n\n\nasync function main() {\n    // var a = '' + require('fs').readFileSync('actions.json')\n    // a = JSON.parse(a)\n    // run_experiment_from_actions(a)\n\n    // return\n\n    g_profile.begin('whole thing')\n\n    var best_t = Infinity\n    var best_seed = null\n    var exp_time_est = 1\n    var longest = 0\n    var longest_seed = null\n    var N = 2000\n\n    var ST = performance.now()\n\n    var times = []\n\n    for (var i = 0; i < N; i++) {\n\n        let sttt = performance.now()\n        \n\n        var seed = '__acb_def_fff_fF246__:' + i\n\n        // N = 1\n        // seed = '__abb__29:4'\n\n\n        console.log('seed: ' + seed)\n        var st = performance.now()\n\n        var r = await run_experiment(seed)\n\n\n        times.push(performance.now() - sttt)\n\n\n        if (!r.ok && r.t < best_t) {\n            best_t = r.t\n            best_seed = seed\n            require('fs').writeFileSync('actions.json', JSON.stringify(r.actions, null, '    '))\n        }\n        var t = performance.now() - st\n        if (t > longest) {\n            longest = t\n            longest_seed = seed\n        }\n        exp_time_est = 0.9 * exp_time_est + 0.1 * t\n        console.log(`exp_time_est = ${exp_time_est}, t=${t}`)\n        console.log(`total time est = ${(exp_time_est * (N - i - 1))/1000/60}min`)\n    }\n    console.log('best_t = ' + best_t)\n    console.log('best_seed = ' + best_seed)\n    console.log('longest = ' + longest)\n    
console.log('longest_seed = ' + longest_seed)\n\n    console.log('time(sec) = ' + (performance.now() - ST)/1000)\n\n    g_profile.end('whole thing')\n\n    g_profile.print()\n\n    // console.log('times: ' + JSON.stringify(times))\n}\n\nasync function run_experiment(rand_seed) {\n    Math.randomSeed(rand_seed)\n\n    g_debug_WS_messages = []\n    g_debug_WS_messages_delayed = []\n    debug_WSS.the_one = null\n\n    var trials = 30\n\n    var db = create_db()\n    var server = null\n    var clients = []\n\n    var log_stuff = false\n\n    var actions = []\n\n    for (var t = 0; t < trials; t++) {\n        Date.now = () => t\n        var st = performance.now()\n        try {\n            log_stuff && console.log('----------------------------- trial ' + t)\n\n            if (!server && Math.random() < 0.4) {\n                log_stuff && console.log('> starting server')\n                actions.push({action: 'starting server', rand: Math.random.get_state()})\n                server = await create_server(db)\n            } else if (server && Math.random() < 0.3) {\n                log_stuff && console.log('> closing server')\n                actions.push({action: 'closing server', rand: Math.random.get_state()})\n                server.close()\n                server = null\n            } else {\n                if (clients.length == 0 || (clients.length < 5 && Math.random() < 0.2)) {\n                    log_stuff && console.log('> creating client')\n                    actions.push({action: 'creating client', rand: Math.random.get_state()})\n                    clients.push(create_client())\n                } else {\n                    let ci = Math.floor(Math.random() * clients.length)\n                    let c = clients[ci]\n                    if (!c.is_open && Math.random() < 0.3) {\n                        log_stuff && console.log('> re-opening client')\n                        actions.push({action: 're-opening client', id: c.id, rand: 
Math.random.get_state()})\n                        c.open()\n                    } else if (c.is_open && Math.random() < 0.4) {\n                        if (Math.random() < 0.5) {\n                            log_stuff && console.log('> closing client (temporarily)')\n                            actions.push({action: 'closing client (temporarily)', id: c.id, rand: Math.random.get_state()})\n                            c.close(false, false)\n                        } else {\n                            var send_forget = Math.random() < 0.333\n                            var send_deletes = send_forget || Math.random() < 0.5\n                            log_stuff && console.log('> killing client' + (send_deletes ? ', sending deletes' : '') + (send_forget ? ', sending forget' : ''))\n                            actions.push({action: 'killing client', send_forget, send_deletes, id: c.id, rand: Math.random.get_state()})\n                            c.close(send_deletes, send_forget)\n                            clients.splice(ci, 1)\n                        }\n                    } else if (c.is_open) {\n\n                        var inner_actions = []\n\n                        for (let cii = 0; cii < clients.length; cii++) {\n                            if (cii == ci || Math.random() < 0.2) {\n                                let c = clients[cii]\n                                let text = c.get()\n                                let start = Math.floor(Math.random() * (text.length + 1))\n                                let len = Math.floor(Math.random() * (text.length - start + 1))\n                                let ins = String.fromCharCode(65 + Math.floor(Math.random() * 26)).repeat(Math.floor(Math.random() * 4) + (len == 0 ? 
1 : 0))\n                                log_stuff && console.log(`> C-${c.id} changing text ` + JSON.stringify(text) + `.splice(${start}, ${len}, ${JSON.stringify(ins)})`)\n                                inner_actions.push({start, len, ins, id: c.id, rand: Math.random.get_state()})\n                                c.set(start, len, ins)\n                            }\n                        }\n\n                        actions.push({action: 'editing', inner_actions, rand: Math.random.get_state()})\n                        \n                    } else {\n                        log_stuff && console.log('> doing nothing..')\n                        actions.push({action: 'doing nothing..'})\n                    }\n                }\n            }\n\n            debug_WS_process_messages()\n\n            log_stuff && console.log(`server: ${server ? `\"${server.get()}\"` : 'down'}`)\n            log_stuff && clients.forEach(c => console.log(`${c.id} client ${c.is_open ? ':' : 'X'} \"${c.get()}\"`))\n\n            if (true) {\n                // console.log('SERVER: ' + (server ? server.get_more() : 'down'))\n                // clients.forEach(c => console.log(`CLIENT ${c.id} = ${c.get_more()}`))\n\n                // console.log('SERVER: ' + (g_current_server ? 
g_current_server.get_null() : 'not started'))\n                // clients.forEach(c => console.log(`CLIENT ${c.id} = ${c.get_null()}`))\n            }\n\n            if (server && clients.some(c => c.is_open)) {\n                let text = server.get()\n                if (clients.some(c => c.is_open && c.get() != text)) {\n                    console.log('NOT THE SAME!')\n                    return {ok: false, t, actions}\n                }\n\n                // work here\n                let o = server.node.resource_at(page_key).mergeable.read()\n                if (!o || !o.cursors || Object.keys(o.cursors).length > clients.length) {\n                    console.log('TOO MANY CURSORS!')\n                    return {ok: false, t, actions}\n                }\n            }\n        } catch (e) {\n            console.log('EXCEPTION', e)\n            return {ok: false, t, actions}\n        }\n        //actions.push({time: performance.now() - st})\n    }\n\n    return {ok: true, actions}\n}\n\nasync function run_experiment_from_actions(actions) {\n    Math.randomSeed('just needed to make set_state available')\n\n    g_debug_WS_messages = []\n    g_debug_WS_messages_delayed = []\n    debug_WSS.the_one = null\n\n    var db = create_db()\n    var server = null\n    var clients = []\n\n    var log_stuff = true\n\n    var t = 0\n    for (var a of actions) {\n        Date.now = () => t\n\n        // console.log('a.action = ' + a.action)\n\n        try {\n            log_stuff && console.log('----------------------------- trial ' + t)\n\n            if (a.action == 'starting server') {\n                log_stuff && console.log('> starting server')\n                Math.random.set_state(a.rand)\n                server = await create_server(db)\n            } else if (a.action == 'closing server') {\n                log_stuff && console.log('> closing server')\n                Math.random.set_state(a.rand)\n                server.close()\n                server = null\n       
     } else {\n                if (a.action == 'creating client') {\n                    log_stuff && console.log('> creating client')\n                    Math.random.set_state(a.rand)\n                    clients.push(create_client())\n                } else {\n                    if (a.action == 're-opening client') {\n                        log_stuff && console.log('> re-opening client')\n                        var c = clients.find(c => c.id == a.id)\n                        Math.random.set_state(a.rand)\n                        c.open()\n                    } else if (a.action == 'closing client (temporarily)') {\n                        log_stuff && console.log('> closing client (temporarily)')\n                        var c = clients.find(c => c.id == a.id)\n                        Math.random.set_state(a.rand)\n                        c.close(false)\n                    } else if (a.action == 'killing client') {\n                        log_stuff && console.log('> killing client' + (a.send_deletes ? ', sending deletes' : '') + (a.send_forget ? 
', sending forget' : ''))\n                        var c = clients.find(c => c.id == a.id)\n                        Math.random.set_state(a.rand)\n                        c.close(a.send_deletes, a.send_forget)\n                        clients.splice(clients.findIndex(c => c.id == a.id), 1)\n                    } else if (a.action == 'editing') {\n                        for (let inner_a of a.inner_actions) {\n                            let start = inner_a.start\n                            let len = inner_a.len\n                            let ins = inner_a.ins\n                            let c = clients.find(c => c.id == inner_a.id)\n                            let text = c.get()\n                            log_stuff && console.log(`> C-${c.id} changing text ` + JSON.stringify(text) + `.splice(${start}, ${len}, ${JSON.stringify(ins)})`)\n                            Math.random.set_state(inner_a.rand)\n                            c.set(start, len, ins)\n                        }\n                        Math.random.set_state(a.rand)\n                    } else if (a.action == 'doing nothing..') {\n                        log_stuff && console.log('> doing nothing..')\n                    } else throw 'bad'\n                }\n            }\n\n            debug_WS_process_messages()\n\n            log_stuff && console.log(`server: ${server ? `\"${server.get()}\"` : 'down'}`)\n            log_stuff && clients.forEach(c => console.log(`${c.id} client ${c.is_open ? 
':' : 'X'} \"${c.get()}\"`))\n\n\n            if (true) {\n\n                console.log('time dags:')\n                var show = (s) => console.log(JSON.stringify(s.time_dag, null, '    '))\n                if (g_current_server) show(g_current_server.node.resource_at(page_key))\n                clients.forEach(c => show(c.node.resource_at(page_key)))\n\n                // console.log('version_cache:')\n                // var show = (s) => console.log(JSON.stringify(s.version_cache, null, '    '))\n                // if (g_current_server) show(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show(c.node.resource_at(page_key)))\n\n                // console.log('incoming_subscriptions:')\n                // var show = (s) => console.log(s.incoming_subscriptions.toString())\n                // if (g_current_server) show(g_current_server.node)\n                // clients.forEach(c => show(c.node))\n\n                // console.log('space dags:')\n                // var show = (s) => console.log(JSON.stringify(s.space_dag, null, '    '))\n                // if (g_current_server) show(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show(c.node.resource_at(page_key)))\n\n                // console.log('read:')\n                // function show2(s) {\n                //     console.log(JSON.stringify(s.mergeable && s.mergeable.read(), null, '    '))\n                // }\n\n                // if (g_current_server) show2(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show2(c.node.resource_at(page_key)))\n\n\n                // console.log('fiss:')\n                // function show3(s) {\n                //     console.log(JSON.stringify(s.fissures, null, '    '))\n                // }\n\n                // if (g_current_server) show3(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show3(c.node.resource_at(page_key)))\n\n\n 
               // console.log('fissures:')\n                // function show2(s) { console.log(JSON.stringify(s.fissures, null, '    ')) }\n\n                // if (g_current_server) show2(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show2(c.node.resource_at(page_key)))\n\n\n\n                // console.log('full versions:')\n                // function show(s) { console.log(JSON.stringify(s, null, '    ')) }\n\n                // if (g_current_server) show(g_current_server.node.resource_at(page_key))\n                // clients.forEach(c => show(c.node.resource_at(page_key)))\n\n                // console.log('SERVER: ', (g_current_server ? g_current_server.node.resource_at(page_key).mergeable.read() : 'not started'))\n\n                // console.log('SERVER: ' + (g_current_server ? g_current_server.get_time() : 'not started'))\n                // clients.forEach(c => console.log(`CLIENT ${c.id} = ${c.get_time()}`))\n\n                // clients.forEach(c => console.log(`CLIENT ${c.id} = ${c.get_more()}`))\n\n                // console.log('null versions:')\n                // console.log('SERVER: ', (g_current_server ? g_current_server.get_null() : 'not started'))\n                //clients.forEach(c => console.log(`CLIENT ${c.id} = ${c.get_null()}`))\n\n                // if (g_current_server)\n                //     console.log('SERVER: ' + JSON.stringify(g_current_server.node.resource_at(page_key), null, '    '))\n\n                // console.log('fissures:')\n                // console.log('SERVER: ', (g_current_server ? 
g_current_server.node.resource_at(page_key).fissures : 'not started'))\n            }\n\n            if (server && clients.some(c => c.is_open)) {\n                let text = server.get()\n                if (clients.some(c => c.is_open && c.get() != text)) {\n                    console.log('NOT THE SAME!')\n                    return {ok: false, t}\n                }\n\n\n                // work here\n                let o = server.node.resource_at(page_key).mergeable.read()\n                if (!o || !o.cursors || Object.keys(o.cursors).length > clients.length) {\n                    console.log('TOO MANY CURSORS!')\n                    return {ok: false, t, actions}\n                }\n\n            }\n        } catch (e) {\n            console.log('EXCEPTION', e)\n            return {ok: false, t}\n        }\n        t++\n    }\n    return {ok: true}\n}\n\nmain()\n\nfunction create_db() {\n    return g_db = {\n        data: {},\n        get(key) { return this.data[key] },\n        set(key, val) { this.data[key] = val },\n        del(key) { delete this.data[key] },\n        list_keys() { return Object.keys(this.data) }\n    }\n}\n\nasync function create_server(db) {\n    db.compress_if_inactive_time = 1000 * 1000\n    db.compress_after_this_many = 10\n\n    var node = require('../node.js')()\n    //node.fissure_lifetime = 1 // 4\n    await require('../store.js')(node, db)\n\n    node.on_errors.push((key, origin) => {\n        node.unbind(key, origin)\n    })\n\n    var wss = require('../websocket-server.js')(node, {wss: new debug_WSS()})\n\n    return g_current_server = {\n        node,\n        get() {\n            var o = node.resource_at(page_key).mergeable.read()\n            return o && o.text\n        },\n        close() {\n            wss.dead = true\n            wss.close()\n        }\n    }\n}\n\nfunction create_client() {\n    var node = require('../node.js')()\n    node.default(page_key, {cursors: {}, text: ''})\n    var ws_client = 
require('../websocket-client.js')({node, create_websocket: () => {\n        return new debug_WS(node.pid)\n    }})\n\n    var cursor_lifetime = 1 // 10000\n\n    var ready = false\n    var text = ''\n    var selectionStart = 0\n    var selectionEnd = 0\n\n    function send_diff(from, to) {\n        var v = node.set(page_key, null, ds.diff_convert_to_my_format(ds.diff_main(from, to)).map(x =>\n            `.text[${x[0]}:${x[0] + x[1]}] = ${JSON.stringify(x[2])}`\n        ))\n    }\n\n    function send_cursor_update(start, end) {\n        node.set(page_key, null, [`.cursors[${JSON.stringify(node.pid)}] = ${JSON.stringify({start: {type: 'location', path: `.text[${start}]`}, end: {type: 'location', path: `.text[${end}]`}, time: Date.now()})}`])\n    }\n\n    var cb = x => {\n        ready = true\n        text = x.text\n        if (x.cursors[node.pid]) {\n            selectionStart = x.cursors[node.pid].start\n            selectionEnd = x.cursors[node.pid].end\n        }\n    }\n    node.get(page_key, cb)\n\n    node.ons.push((method, arg) => {\n        if (method != 'welcome' && method != 'fissure') return\n        if (arg.key != page_key) return\n\n        var fs = {}\n        if (method == 'welcome') {\n            for (let f of arg.fissures)\n                fs[`${f.a}:${f.b}:${f.conn}`] = f\n        } else {\n            let f = arg.fissure\n            fs[`${f.a}:${f.b}:${f.conn}`] = f\n        }\n\n        var rest = () => {\n            var o = node.resource_at(page_key).mergeable.read()\n            if (!o || !o.cursors) return\n\n            Object.assign(fs, node.resource_at(page_key).fissures)\n    \n            var delete_us = {}\n            Object.values(fs).forEach(f => {\n                if (!fs[`${f.b}:${f.a}:${f.conn}`]) {\n                    if (o.cursors[f.b]) delete_us[f.b] = true\n                }\n            })\n\n            var now = Date.now()\n            Object.entries(o.cursors).forEach(([k, v]) => {\n                if (k != node.pid && 
v.time <= now - cursor_lifetime) delete_us[k] = true\n            })\n\n            var patches = Object.keys(delete_us).map(k => `delete .cursors[${JSON.stringify(k)}]`)\n            if (patches.length) node.set(page_key, null, patches)\n        }\n        if (g_debug_WS_messages) g_debug_WS_messages.push(rest)\n        else setTimeout(rest, 0)\n    })\n\n    node.on_errors.push((key, origin) => {\n        // console.log('CLIENT ON ERROR')\n\n        text = ''\n        selectionStart = 0\n        selectionEnd = 0\n\n        delete node.resources[key]\n        node.unbind(key, origin)\n\n        var subscribe = ws_client.pipe.subscribed_keys[key].we_requested\n        delete ws_client.pipe.subscribed_keys[key].we_requested\n\n        ws_client.pipe.send({\n            key,\n            subscribe,\n            method: 'get'\n        })\n    })\n\n    var self\n    return self = {\n        id: node.pid,\n        node,\n        is_open: true,\n        get: () => {\n            return text\n        },\n        set: (x, del, ins) => {\n            if (!ready) return\n            var new_text = text.slice(0, x) + ins + text.slice(x + del)\n            send_diff(text, new_text)\n            if (x + ins.length <= new_text.length)\n                send_cursor_update(x + ins.length, x + ins.length)\n            else\n                send_cursor_update(new_text.length, new_text.length)\n        },\n        close: (send_deletes, send_forget) => {\n            if (ready && send_deletes) node.set(page_key, null, [`delete .cursors[${JSON.stringify(node.pid)}]`])\n            if (send_forget) node.forget(page_key, cb)\n            ws_client.disable()\n            self.is_open = false\n        },\n        open: () => {\n            ws_client.enable()\n            self.is_open = true\n        }\n    }    \n}\n"
  },
  {
    "path": "kernel/websocket-client.js",
    "content": "// Example braid-peer as a web browser client\n\nmodule.exports = require['websocket-client'] = function add_websocket_client({node, url, prefix, create_websocket}) {\n    url = url       || 'ws://localhost:3007/'\n    prefix = prefix || '/*'\n\n    var client_creds = null\n    var enabled = true\n    var sock\n\n    create_websocket = create_websocket || function () {\n        return new WebSocket(url + '.braid-websocket')\n    }\n\n    var reconnect_timeout = null\n    var listeners = {};\n\n    var addEventListener = (type, cb) => {\n        if (!(type in listeners)) {\n            listeners[type] = [];\n        }\n        listeners[type].push(cb);\n    }\n    var dispatchEvent = (event) => {\n        if (!(event.type in listeners)) {\n            return true;\n        }\n        var stack = listeners[event.type].slice();\n    \n        for (var i = 0, l = stack.length; i < l; i++) {\n            stack[i].call(this, event);\n        }\n        return !event.defaultPrevented;\n    }\n\n    var connect = () => {\n        clearTimeout(reconnect_timeout)\n        if (!enabled) { return }\n\n        sock           = create_websocket()\n        sock.onopen    = ()  => {\n            if (onclose_called_already) { return }\n            pipe.connected()\n            dispatchEvent({type: \"connect\"})\n        }\n        sock.onmessage = message => {\n            if (onclose_called_already) { return }\n            var text = message.data;\n            var msg = JSON.parse(text);\n            if (msg.method != \"ping\" && msg.method != \"pong\") {\n                nlogf('WS', 'remote', '-->', 'local ', msg);\n            }\n            pipe.recv(msg)\n        }\n        var onclose_called_already = false\n        var local_sock = sock\n        sock.onclose   = (a)  => {\n            if (onclose_called_already) { return }\n            onclose_called_already = true\n            if (local_sock != sock) { return }\n            \n            
pipe.disconnected()\n            if (enabled) {\n                if (typeof(g_debug_WS_messages_delayed) != 'undefined')\n                    g_debug_WS_messages_delayed.push(connect)\n                else reconnect_timeout = setTimeout(connect, 5000)\n            }\n            dispatchEvent({type: \"disconnect\"});\n        }\n        sock.onerror = () => {}\n    }\n    var disconnect = () => {\n        sock.close()\n        sock.onclose()\n    }\n\n    var pipe = require('./pipe.js')({\n        id: node.pid,\n        type: 'ws-client',\n        node,\n        connect,\n        disconnect,\n        send: (msg) => {\n            let text = JSON.stringify(msg);\n            if (msg.method != \"ping\" && msg.method != \"pong\") {\n                nlogf('WS', 'local ', '-->', 'remote', msg);\n            }\n            sock.send(text);\n        }\n    })\n    node.bind(prefix, pipe)\n\n    return {\n        pipe,\n        addEventListener,\n        enabled() {return enabled},\n        enable()  {nlog('ENABLING PIPE', pipe.id);enabled = true; connect()},\n        disable() {nlog('DISABLING PIPE',pipe.id);enabled = false; disconnect()},\n        toggle()  {if (enabled) {disable()} else enable()}\n    }\n}\n"
  },
  {
    "path": "kernel/websocket-server.js",
    "content": "// Example braid-peer as a web server\n// options = {\n//     port: // default is 3007\n//     wss: // default is null, will create a 'ws' module WebSocket.Server with the given port\n// }\nmodule.exports = require['websocket-server'] = function add_websocket_server(node, options) {\n    if (!options) options = {}\n    var s = options.wss || new (require('ws')).Server({port: options.port || 3007})\n    s.on('connection', function(conn, req) {\n        var pipe = require('./pipe.js')({node, connect, disconnect, send})\n        const peer_name = (m) => (pipe.remote_peer || (m || {}).my_name_is || 'C-?').toString();\n        const ip = req.socket.remoteAddress;\n        // console.log(`New connection from ${ip}`)\n        conn.on('message', (text) => {\n            var msg = JSON.parse(text);\n            if (msg.method != \"ping\" && msg.method != \"pong\") {\n                nlogf('WS', peer_name(msg).slice(0,6).padEnd(6), '-->', 'server', msg);\n            }\n            pipe.recv(msg)\n        })\n        conn.on('close', () => {\n            log('ws: socket closed ', s.dead ? '<<dead>>' : '')\n            if (s.dead) return\n            pipe.disconnected()\n        })\n        pipe.connected()\n\n        function connect () {\n            // we're connected already, nothing to do\n            log('ws-serve: connected')\n            // pipe.connected() <-- this is called just above\n        }\n        function disconnect () {\n            conn.terminate()\n        }\n        function send (msg) {\n            let text = JSON.stringify(msg);\n            if (msg.method != \"ping\" && msg.method != \"pong\") {\n                nlogf('WS', 'server', '-->', peer_name().slice(0,6).padEnd(6), msg);\n            }\n            conn.send(text);\n        }\n    })\n    return s\n}"
  },
  {
    "path": "readme.md",
    "content": "# The Braidjs Monorepo\n\nBy versioning our code together, it becomes easier to interoperate.\n\n  - Each top-level folder is a project.  Add yours!\n  - Now you can make breaking changes (like a protocol change), without\n    actually *breaking* anything—upgrade all the relevant code, across\n    multiple projects, at once!\n\nThis is not my code.  This is *our* code.\n\n### Projects\n\nAdd yours today!\n\n - `antimatter`: [An implementation of the Antimatter Algorithm](https://github.com/braid-org/braidjs/tree/master/antimatter)\n - `antimatter_wiki`: [An example Wiki using Antimatter](https://github.com/braid-org/braidjs/tree/master/antimatter_wiki)\n - `braid-http`: [A reference implementation of the Braid Protocol](https://github.com/braid-org/braidjs/tree/master/braid-http)\n - `json-patch`: [Applies a Range-Patch to JSON](https://github.com/braid-org/braidjs/tree/master/json-patch)\n - `kernel`: [A prototype Braid Kernel](https://github.com/braid-org/braidjs/tree/master/kernel)\n - `simpleton`: [A very simple and fast CRDT sync for light clients](https://github.com/braid-org/braidjs/tree/master/simpleton)\n - `sync9`: [A CRDT that supports pruning history](https://github.com/braid-org/braidjs/tree/master/sync9)\n - `util`: [A set of common utilities](https://github.com/braid-org/braidjs/tree/master/util)\n\nRead more about braid at https://braid.org!\n\n### Faq\n\nQ. Wait... can a single repo support multiple NPM packages?\n\n  - A. Yep!  Just create a `package.json` in your project's root folder, and\n    then run `npm publish` from it.\n"
  },
  {
    "path": "simple_d_ton/index.js",
    "content": "console.log(\"v13\")\n\nlet { Doc, Branch, OpLog } = require(\"diamond-types-node\")\nlet braidify = require(\"braid-http\").http_server\nlet fs = require(\"fs\")\n\nlet waiting_puts = 0\nlet prev_put_p = null\n\nasync function simple_d_ton(req, res, options = {}) {\n    options = {\n        db_folder: null,                 // Default db_folder\n        key: req.url.split('?')[0],      // Default key\n        ...options                       // Override with all options passed in\n    }\n    \n    let resource = await get_resource(options.key, options.db_folder)\n\n    braidify(req, res)\n\n    let peer = req.headers[\"peer\"]\n    res.my_peer = peer\n\n    let desired_type = options.type ?? req.headers.accept?.split(',')[0]\n\n    res.setHeader(\"Access-Control-Allow-Origin\", \"*\")\n    res.setHeader(\"Access-Control-Allow-Methods\", \"*\")\n    res.setHeader(\"Access-Control-Allow-Headers\", \"*\")\n    res.setHeader(\"Access-Control-Expose-Headers\", \"*\")\n\n    function my_end(statusCode, x) {\n        res.statusCode = statusCode\n        res.end(x ?? 
'')\n    }\n\n    if (req.method == \"OPTIONS\") return my_end(200)\n\n    if (req.method == \"DELETE\") {\n        await resource.delete_me()\n        return my_end(200)\n    }\n\n    if ((req.method == \"GET\" || req.method == \"HEAD\") && (desired_type != \"text/html\") && req.subscribe) {\n        res.setHeader(\"Content-Type\", desired_type + '; charset=utf-8')\n        res.setHeader(\"Editable\", \"true\")\n        if (req.headers[\"merge-type\"] != \"dt\") {\n            res.setHeader(\"Merge-Type\", \"simpleton\")\n\n            if (req.method == \"HEAD\") return my_end(200)\n\n            res.startSubscription({\n                onClose: (_) => resource.simpleton_clients.delete(res),\n            })\n\n            let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n            let x = { version }\n\n            if (!req.parents && !req.version) {\n                x.parents = []\n                x.body = resource.doc.get()\n                res.sendVersion(x)\n            } else {\n                x.parents = req.version ? 
req.version : req.parents\n                res.my_last_seen_version = x.parents\n\n                // only send them a version from these parents if we have these parents (otherwise we'll assume these parents are more recent, probably versions they created but haven't sent us yet, and we'll send them appropriate rebased updates when they send us these versions)\n                let local_version = OpLog_remote_to_local(resource.doc, x.parents)\n                if (local_version) {\n                    x.patches = get_xf_patches(resource.doc, local_version)\n                    res.sendVersion(x)\n                }\n            }\n\n            res.my_last_sent_version = version\n            resource.simpleton_clients.add(res)\n        } else {\n            res.setHeader(\"Merge-Type\", \"dt\")\n\n            if (req.method == \"HEAD\") return my_end(200)\n\n            res.startSubscription({ onClose: (_) => resource.clients.delete(res) })\n\n            let updates = null\n\n            if (resource.need_defrag) {\n                console.log(`doing defrag..`)\n                resource.need_defrag = false\n                resource.doc = defrag_dt(resource.doc)\n            }\n\n            if (!req.parents && !req.version) {\n                res.sendVersion({\n                    version: [\"root\"],\n                    parents: [],\n                    body: \"\",\n                })\n\n                updates = OpLog_get_patches(resource.doc.toBytes(), resource.doc.getOpsSince([]))\n            } else {\n                // Then start the subscription from the Parents in request\n                let parents = Object.fromEntries((req.parents ? 
req.parents : req.version).map((x) => [x, true]))\n\n                let local_version = []\n                let [agents, versions, parentss] = parseDT([...resource.doc.toBytes()])\n                for (let i = 0; i < versions.length; i++) {\n                    if (parents[versions[i].join(\"-\")]) local_version.push(i)\n                }\n                local_version = new Uint32Array(local_version)\n\n                updates = OpLog_get_patches(resource.doc.getPatchSince(local_version), resource.doc.getOpsSince(local_version))\n            }\n\n            for (let u of updates) {\n                u.version = decode_version(u.version)\n                u.version[1] += u.end - u.start - 1\n                u.version = u.version.join(\"-\")\n\n                res.sendVersion({\n                    version: [u.version],\n                    parents: u.parents,\n                    patches: [{ unit: u.unit, range: u.range, content: u.content }],\n                })\n            }\n\n            // Output at least *some* data, or else chrome gets confused and\n            // thinks the connection failed.  
This isn't strictly necessary,\n            // but it makes fewer scary errors get printed out in the JS\n            // console.\n            if (updates.length === 0) res.write(\"\\r\\n\")\n\n            resource.clients.add(res)\n        }\n        return\n    }\n\n    if (req.method == \"GET\" || req.method == \"HEAD\") {\n        res.setHeader(\"Content-Type\", desired_type + '; charset=utf-8')\n        res.setHeader(\"Accept-Subscribe\", \"true\")\n\n        let doc = null\n        if (req.version || req.parents) {\n            let frontier = {}\n            req.version?.forEach((x) => (frontier[x] = true))\n            req.parents?.forEach((x) => (frontier[x] = true))\n\n            let local_version = []\n            let [agents, versions, parentss] = parseDT([...resource.doc.toBytes()])\n            for (let i = 0; i < versions.length; i++) {\n                if (frontier[versions[i].join(\"-\")]) {\n                    local_version.push(i)\n                }\n            }\n            local_version = new Uint32Array(local_version)\n\n            let after_versions = {}\n            let [_, after_versions_array, __] = parseDT([...resource.doc.getPatchSince(local_version)])\n            for (let v of after_versions_array) after_versions[v.join(\"-\")] = true\n\n            let new_doc = new Doc()\n            let op_runs = resource.doc.getOpsSince([])\n            let i = 0\n            op_runs.forEach((op_run) => {\n                let parents = parentss[i].map((x) => x.join(\"-\"))\n                let start = op_run.start\n                let end = start + 1\n                let content = op_run.content?.[0]\n\n                let len = op_run.end - op_run.start\n                let base_i = i\n                for (let j = 1; j <= len; j++) {\n                    let I = base_i + j\n                    if (\n                        j == len ||\n                        parentss[I].length != 1 ||\n                        parentss[I][0][0] != versions[I - 
1][0] ||\n                        parentss[I][0][1] != versions[I - 1][1] ||\n                        versions[I][0] != versions[I - 1][0] ||\n                        versions[I][1] != versions[I - 1][1] + 1\n                    ) {\n                        for (; i < I; i++) {\n                            let version = versions[i].join(\"-\")\n                            if (!after_versions[version]) {\n                                new_doc.mergeBytes(\n                                    OpLog_create_bytes(\n                                        version,\n                                        parentss[i].map((x) => x.join(\"-\")),\n                                        content ? start + (i - base_i) : start,\n                                        content?.[0]\n                                    )\n                                )\n                            }\n                            if (op_run.content) content = content.slice(1)\n                        }\n                        content = \"\"\n                    }\n                    if (op_run.content) content += op_run.content[j]\n                }\n            })\n            doc = new_doc\n        } else doc = resource.doc\n        const buffer = Buffer.from(doc.get(), \"utf8\")\n\n        res.setHeader(\"Content-Length\", buffer.length)\n\n        res.setHeader(\n            \"Version\",\n            doc\n                .getRemoteVersion()\n                .map((x) => encode_version(...x))\n                .map((x) => JSON.stringify(x))\n                .join(\", \")\n        )\n\n        if (req.method == \"HEAD\") return my_end(200)\n\n        return my_end(200, buffer)\n    }\n\n    if (req.method == \"PUT\" || req.method == \"POST\" || req.method == \"PATCH\") {\n        if (waiting_puts >= 100) {\n            console.log(`The server is busy.`)\n            return my_end(503, \"The server is busy.\")\n        }\n\n        waiting_puts++\n        console.log(`waiting_puts(after++) = 
${waiting_puts}`)\n\n        let my_prev_put_p = prev_put_p\n        let done_my_turn = null\n        prev_put_p = new Promise(\n            (done) =>\n                (done_my_turn = (statusCode, x) => {\n                    waiting_puts--\n                    console.log(`waiting_puts(after--) = ${waiting_puts}`)\n                    my_end(statusCode, x)\n                    done()\n                })\n        )\n        let patches = await req.patches()\n        await my_prev_put_p\n\n        if (patches[0]?.unit === 'everything') {\n            patches[0].unit = 'text'\n            patches[0].range = `[0:${count_code_points(resource.doc.get())}]`\n        }\n\n        let og_patches = patches\n        patches = patches.map((p) => ({\n            ...p,\n            range: p.range.match(/\\d+/g).map((x) => parseInt(x)),\n            ...(p.content ? {content: [...p.content]} : {}),\n        }))\n\n        let change_count = patches.reduce((a, b) => a + b.content.length + (b.range[1] - b.range[0]), 0)\n\n        let og_v = req.version[0] || `${Math.random().toString(36).slice(2, 7)}-${change_count - 1}`\n\n        // reduce the version sequence by the number of char-edits\n        let v = decode_version(og_v)\n        v = encode_version(v[0], v[1] + 1 - change_count)\n\n        let parents = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n        let og_parents = req.parents || parents\n        let ps = og_parents\n        if (!ps.length) ps = [\"root\"]\n\n        let v_before = resource.doc.getLocalVersion()\n\n        let bytes = []\n\n        let offset = 0\n        for (let p of patches) {\n            // delete\n            for (let i = p.range[0]; i < p.range[1]; i++) {\n                bytes.push(OpLog_create_bytes(v, ps, p.range[1] - 1 + offset, null))\n                offset--\n                ps = [v]\n                v = decode_version(v)\n                v = encode_version(v[0], v[1] + 1)\n            }\n            // insert\n       
     for (let i = 0; i < p.content?.length ?? 0; i++) {\n                let c = p.content[i]\n                bytes.push(OpLog_create_bytes(v, ps, p.range[1] + offset, c))\n                offset++\n                ps = [v]\n                v = decode_version(v)\n                v = encode_version(v[0], v[1] + 1)\n            }\n        }\n\n        try {\n            for (let b of bytes) resource.doc.mergeBytes(b)\n        } catch (e) {\n            console.log(`EEE= ${e}:${e.stack}`)\n            // we couldn't apply the version, presumably because we're missing its parents.\n            // we want to send a 4XX error, so the client will resend this request later,\n            // hopefully after we've received the necessary parents.\n\n            // here are some 4XX error code options..\n            //\n            // - 425 Too Early\n            //     - pros: our message is too early\n            //     - cons: associated with some \"Early-Data\" http thing, which we're not using\n            // - 400 Bad Request\n            //     - pros: pretty generic\n            //     - cons: implies client shouldn't resend as-is\n            // - 409 Conflict\n            //     - pros: doesn't imply modifications needed\n            //     - cons: the message is not conflicting with anything\n            // - 412 Precondition Failed\n            //     - pros: kindof true.. 
the precondition of having another version has failed..\n            //     - cons: not strictly true, as this code is associated with http's If-Unmodified-Since stuff\n            // - 422 Unprocessable Content\n            //     - pros: it's true\n            //     - cons: implies client shouldn't resend as-is (at least, it says that here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422)\n            // - 428 Precondition Required\n            //     - pros: the name sounds right\n            //     - cons: typically implies that the request was missing an http conditional field like If-Match. that is to say, it implies that the request is missing a precondition, not that the server is missing a precondition\n            return done_my_turn(425, \"The server is missing the parents of this version.\")\n        }\n\n        resource.need_defrag = true\n\n        let v_after = resource.doc.getLocalVersion()\n        if (JSON.stringify(v_before) === JSON.stringify(v_after)) {\n            console.log(`we got a version we already had: ${v_before}`)\n            return done_my_turn(200)\n        }\n\n        if (req.headers[\"merge-type\"] != \"dt\") {\n            patches = get_xf_patches(resource.doc, v_before)\n            console.log(JSON.stringify({ patches }))\n\n            let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n\n            for (let client of resource.simpleton_clients) {\n                if (client.my_peer == peer) {\n                    client.my_last_seen_version = [og_v]\n                }\n\n                function set_timeout(time_override) {\n                    if (client.my_timeout) clearTimeout(client.my_timeout)\n                    client.my_timeout = setTimeout(() => {\n                        let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n                        let x = { version }\n                        x.parents = client.my_last_seen_version\n\n          
              console.log(\"rebasing after timeout.. \")\n                        console.log(\"    client.my_unused_version_count = \" + client.my_unused_version_count)\n                        x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))\n\n                        console.log(`sending from rebase: ${JSON.stringify(x)}`)\n                        client.sendVersion(x)\n                        client.my_last_sent_version = x.version\n\n                        delete client.my_timeout\n                    }, time_override ?? Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))\n                }\n\n                if (client.my_timeout) {\n                    if (client.my_peer == peer) {\n                        if (!v_eq(client.my_last_sent_version, og_parents)) {\n                            // note: we don't add to client.my_unused_version_count,\n                            // because we're already in a timeout;\n                            // we'll just extend it here..\n                            set_timeout()\n                        } else {\n                            // hm.. it appears we got a correctly parented version,\n                            // which suggests that maybe we can stop the timeout early\n                            set_timeout(0)\n                        }\n                    }\n                    continue\n                }\n\n                let x = { version }\n                if (client.my_peer == peer) {\n                    if (!v_eq(client.my_last_sent_version, og_parents)) {\n                        client.my_unused_version_count = (client.my_unused_version_count ?? 
0) + 1\n                        set_timeout()\n                        continue\n                    } else {\n                        delete client.my_unused_version_count\n                    }\n\n                    x.parents = req.version\n                    if (!v_eq(version, req.version)) {\n                        console.log(\"rebasing..\")\n                        x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))\n                    } else {\n                        // this client already has this version,\n                        // so let's pretend to send it back, but not\n                        console.log(`not reflecting back to simpleton`)\n                        client.my_last_sent_version = x.version\n                        continue\n                    }\n                } else {\n                    x.parents = parents\n                    x.patches = patches\n                }\n                console.log(`sending: ${JSON.stringify(x)}`)\n                client.sendVersion(x)\n                client.my_last_sent_version = x.version\n            }\n        } else {\n            if (resource.simpleton_clients.size) {\n                patches = get_xf_patches(resource.doc, v_before)\n                let x = { version: [og_v], parents, patches }\n                console.log(`sending: ${JSON.stringify(x)}`)\n                for (let client of resource.simpleton_clients) {\n                    if (client.my_timeout) continue\n                    client.sendVersion(x)\n                    client.my_last_sent_version = x.version\n                }\n            }\n        }\n\n        let x = {\n            version: [og_v],\n            parents: og_parents,\n            patches: og_patches,\n        }\n        for (let client of resource.clients) {\n            if (client.my_peer != peer) client.sendVersion(x)\n        }\n\n        await resource.db_delta(resource.doc.getPatchSince(v_before))\n\n        
options.put_cb?.(options.key, resource.doc.get())\n\n        return done_my_turn(200)\n    }\n\n    throw new Error(\"unknown\")\n}\n\nasync function get_resource(key, db_folder) {\n    let cache = get_resource.cache || (get_resource.cache = {})\n    if (cache[key]) return cache[key]\n\n    let resource = {}\n    resource.clients = new Set()\n    resource.simpleton_clients = new Set()\n\n    resource.doc = new Doc(\"server\")\n\n    let { change, delete_me } = db_folder\n        ? await file_sync(\n              db_folder,\n              encodeURIComponent(key),\n              (bytes) => resource.doc.mergeBytes(bytes),\n              () => resource.doc.toBytes()\n          )\n        : { change: () => {}, delete_me: () => {} }\n\n    resource.db_delta = change\n\n    resource.doc = defrag_dt(resource.doc)\n    resource.need_defrag = false\n\n    resource.delete_me = () => {\n        delete_me()\n        delete cache[key]\n    }\n\n    return (cache[key] = resource)\n}\n\nasync function file_sync(db_folder, filename_base, process_delta, get_init) {\n    let currentNumber = 0\n    let currentSize = 0\n    let threshold = 0\n\n    // Ensure the existence of db_folder\n    try {\n        await fs.promises.access(db_folder);\n    } catch (err) {\n        if (err.code === 'ENOENT') {\n            await fs.promises.mkdir(db_folder, { recursive: true });\n        } else {\n            throw err;\n        }\n    }\n\n    // Read existing files and sort by numbers.\n    async function get_sorted_files() {\n        let re = new RegExp(\"^\" + filename_base.replace(/[^a-zA-Z0-9]/g, \"\\\\$&\") + \"\\\\.\\\\d+$\")\n        return (await fs.promises.readdir(db_folder))\n            .filter((a) => re.test(a))\n            .sort((a, b) => parseInt(a.match(/\\d+$/)[0]) - parseInt(b.match(/\\d+$/)[0]))\n            .map((a) => `${db_folder}/${a}`)\n    }\n\n    const files = await get_sorted_files()\n\n    // Try to process files starting from the highest number.\n    let done = 
false\n    for (let i = files.length - 1; i >= 0; i--) {\n        if (done) {\n            await fs.promises.unlink(files[i])\n            continue\n        }\n        try {\n            const filename = files[i]\n            console.log(`trying to process file: ${filename}`)\n            const data = await fs.promises.readFile(filename)\n\n            let cursor = 0\n            let isFirstChunk = true\n            while (cursor < data.length) {\n                const chunkSize = data.readUInt32LE(cursor)\n                cursor += 4\n                const chunk = data.slice(cursor, cursor + chunkSize)\n                cursor += chunkSize\n\n                if (isFirstChunk) {\n                    isFirstChunk = false\n                    threshold = chunkSize * 10\n                }\n                process_delta(chunk)\n            }\n\n            currentSize = data.length\n            currentNumber = parseInt(filename.match(/\\d+$/)[0])\n            done = true\n        } catch (error) {\n            console.error(`Error processing file: ${files[i]}`)\n            await fs.promises.unlink(files[i])\n        }\n    }\n\n    return {\n        change: async (bytes) => {\n            currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32\n            const filename = `${db_folder}/${filename_base}.${currentNumber}`\n            if (currentSize < threshold) {\n                console.log(`appending to db..`)\n\n                let buffer = Buffer.allocUnsafe(4)\n                buffer.writeUInt32LE(bytes.length, 0)\n                await fs.promises.appendFile(filename, buffer)\n                await fs.promises.appendFile(filename, bytes)\n\n                console.log(\"wrote to : \" + filename)\n            } else {\n                try {\n                    console.log(`starting new db..`)\n\n                    currentNumber++\n                    const init = get_init()\n                    const buffer = Buffer.allocUnsafe(4)\n       
             buffer.writeUInt32LE(init.length, 0)\n\n                    const newFilename = `${db_folder}/${filename_base}.${currentNumber}`\n                    await fs.promises.writeFile(newFilename, buffer)\n                    await fs.promises.appendFile(newFilename, init)\n\n                    console.log(\"wrote to : \" + newFilename)\n\n                    currentSize = 4 + init.length\n                    threshold = currentSize * 10\n                    try {\n                        await fs.promises.unlink(filename)\n                    } catch (e) {}\n                } catch (e) {\n                    console.log(`e = ${e.stack}`)\n                }\n            }\n        },\n        delete_me: async () => {\n            await Promise.all(\n                (\n                    await get_sorted_files()\n                ).map((file) => {\n                    return new Promise((resolve, reject) => {\n                        fs.unlink(file, (err) => {\n                            if (err) {\n                                console.error(`Error deleting file: ${file}`)\n                                reject(err)\n                            } else {\n                                console.log(`Deleted file: ${file}`)\n                                resolve()\n                            }\n                        })\n                    })\n                })\n            )\n        },\n    }\n}\n\n//////////////////////////////////////////////////////////////////\n//////////////////////////////////////////////////////////////////\n//////////////////////////////////////////////////////////////////\n\nfunction defrag_dt(doc) {\n    let fresh_doc = new Doc(\"server\")\n    fresh_doc.mergeBytes(doc.toBytes())\n    return fresh_doc\n}\n\nfunction OpLog_get_patches(bytes, op_runs) {\n    //   console.log(`op_runs = `, op_runs);\n\n    let [agents, versions, parentss] = parseDT([...bytes])\n\n    //   console.log(JSON.stringify({agents, versions, 
parentss}, null, 4))\n\n    let i = 0\n    let patches = []\n    op_runs.forEach((op_run) => {\n        let version = versions[i].join(\"-\")\n        let parents = parentss[i].map((x) => x.join(\"-\"))\n        let start = op_run.start\n        let end = start + 1\n        if (op_run.content) op_run.content = [...op_run.content]\n        let content = op_run.content?.[0]\n        let len = op_run.end - op_run.start\n        for (let j = 1; j <= len; j++) {\n            let I = i + j\n            if (\n                j == len ||\n                parentss[I].length != 1 ||\n                parentss[I][0][0] != versions[I - 1][0] ||\n                parentss[I][0][1] != versions[I - 1][1] ||\n                versions[I][0] != versions[I - 1][0] ||\n                versions[I][1] != versions[I - 1][1] + 1\n            ) {\n                patches.push({\n                    version,\n                    parents,\n                    unit: \"text\",\n                    range: content ? `[${start}:${start}]` : `[${start}:${end}]`,\n                    content: content ?? 
\"\",\n                    start,\n                    end,\n                })\n                if (j == len) break\n                version = versions[I].join(\"-\")\n                parents = parentss[I].map((x) => x.join(\"-\"))\n                start = op_run.start + j\n                content = \"\"\n            }\n            end++\n            if (op_run.content) content += op_run.content[j]\n        }\n        i += len\n    })\n    return patches\n}\n\nfunction parseDT(byte_array) {\n    if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== \"DMNDTYPS\") throw new Error(\"dt parse error, expected DMNDTYPS\")\n\n    if (byte_array.shift() != 0) throw new Error(\"dt parse error, expected version 0\")\n\n    let agents = []\n    let versions = []\n    let parentss = []\n\n    while (byte_array.length) {\n        let id = byte_array.shift()\n        let len = read_varint(byte_array)\n        if (id == 1) {\n        } else if (id == 3) {\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                agents.push(read_string(byte_array))\n            }\n        } else if (id == 20) {\n        } else if (id == 21) {\n            let seqs = {}\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                let part0 = read_varint(byte_array)\n                let has_jump = part0 & 1\n                let agent_i = (part0 >> 1) - 1\n                let run_length = read_varint(byte_array)\n                let jump = 0\n                if (has_jump) {\n                    let part2 = read_varint(byte_array)\n                    jump = part2 >> 1\n                    if (part2 & 1) jump *= -1\n                }\n                let base = (seqs[agent_i] || 0) + jump\n\n                for (let i = 0; i < run_length; i++) {\n                    versions.push([agents[agent_i], base + i])\n                }\n                seqs[agent_i] = base + 
run_length\n            }\n        } else if (id == 23) {\n            let count = 0\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                let run_len = read_varint(byte_array)\n\n                let parents = []\n                let has_more = 1\n                while (has_more) {\n                    let x = read_varint(byte_array)\n                    let is_foreign = 0x1 & x\n                    has_more = 0x2 & x\n                    let num = x >> 2\n\n                    if (x == 1) {\n                        parents.push([\"root\"])\n                    } else if (!is_foreign) {\n                        parents.push(versions[count - num])\n                    } else {\n                        parents.push([agents[num - 1], read_varint(byte_array)])\n                    }\n                }\n                parentss.push(parents)\n                count++\n\n                for (let i = 0; i < run_len - 1; i++) {\n                    parentss.push([versions[count - 1]])\n                    count++\n                }\n            }\n        } else {\n            byte_array.splice(0, len)\n        }\n    }\n\n    function read_string(byte_array) {\n        return new TextDecoder().decode(new Uint8Array(byte_array.splice(0, read_varint(byte_array))))\n    }\n\n    function read_varint(byte_array) {\n        let result = 0\n        let shift = 0\n        while (true) {\n            if (byte_array.length === 0) throw new Error(\"byte array does not contain varint\")\n\n            let byte_val = byte_array.shift()\n            result |= (byte_val & 0x7f) << shift\n            if ((byte_val & 0x80) == 0) return result\n            shift += 7\n        }\n    }\n\n    return [agents, versions, parentss]\n}\n\nfunction OpLog_create_bytes(version, parents, pos, ins) {\n    // console.log(`args = ${JSON.stringify({ version, parents, pos, ins }, null, 4)}`)\n\n    function write_varint(bytes, value) {\n        
while (value >= 0x80) {\n            bytes.push((value & 0x7f) | 0x80)\n            value >>= 7\n        }\n        bytes.push(value)\n    }\n\n    function write_string(byte_array, str) {\n        let str_bytes = new TextEncoder().encode(str)\n        write_varint(byte_array, str_bytes.length)\n        byte_array.push(...str_bytes)\n    }\n\n    version = decode_version(version)\n    parents = parents.map(decode_version)\n\n    let bytes = []\n    bytes = bytes.concat(Array.from(new TextEncoder().encode(\"DMNDTYPS\")))\n    bytes.push(0)\n\n    let file_info = []\n    let agent_names = []\n\n    let agents = new Set()\n    agents.add(version[0])\n    for (let p of parents) if (p.length > 1) agents.add(p[0])\n    agents = [...agents]\n\n    //   console.log(JSON.stringify({ agents, parents }, null, 4));\n\n    let agent_to_i = {}\n    for (let [i, agent] of agents.entries()) {\n        agent_to_i[agent] = i\n        write_string(agent_names, agent)\n    }\n\n    file_info.push(3)\n    write_varint(file_info, agent_names.length)\n    file_info.push(...agent_names)\n\n    bytes.push(1)\n    write_varint(bytes, file_info.length)\n    bytes.push(...file_info)\n\n    let branch = []\n\n    if (parents[0].length > 1) {\n        let frontier = []\n\n        for (let [i, [agent, seq]] of parents.entries()) {\n            let has_more = i < parents.length - 1\n            let mapped = agent_to_i[agent]\n            let n = ((mapped + 1) << 1) | (has_more ? 
1 : 0)\n            write_varint(frontier, n)\n            write_varint(frontier, seq)\n        }\n\n        branch.push(12)\n        write_varint(branch, frontier.length)\n        branch.push(...frontier)\n    }\n\n    bytes.push(10)\n    write_varint(bytes, branch.length)\n    bytes.push(...branch)\n\n    let patches = []\n\n    if (ins) {\n        let inserted_content_bytes = []\n\n        inserted_content_bytes.push(0) // ins (not del, which is 1)\n\n        inserted_content_bytes.push(13) // \"content\" enum (rather than compressed)\n\n        let encoder = new TextEncoder()\n        let utf8Bytes = encoder.encode(ins)\n\n        inserted_content_bytes.push(1 + utf8Bytes.length) // length of content chunk\n        inserted_content_bytes.push(4) // \"plain text\" enum\n\n        for (let b of utf8Bytes) inserted_content_bytes.push(b) // actual text\n\n        inserted_content_bytes.push(25) // \"known\" enum\n        inserted_content_bytes.push(1) // length of \"known\" chunk\n        inserted_content_bytes.push(3) // content of length 1, and we \"know\" it\n\n        patches.push(24)\n        write_varint(patches, inserted_content_bytes.length)\n        patches.push(...inserted_content_bytes)\n    }\n\n    // write in the version\n    let version_bytes = []\n\n    let [agent, seq] = version\n    let agent_i = agent_to_i[agent]\n    let jump = seq\n\n    write_varint(version_bytes, ((agent_i + 1) << 1) | (jump != 0 ? 1 : 0))\n    write_varint(version_bytes, 1)\n    if (jump) write_varint(version_bytes, jump << 1)\n\n    patches.push(21)\n    write_varint(patches, version_bytes.length)\n    patches.push(...version_bytes)\n\n    // write in \"op\" bytes (some encoding of position)\n    let op_bytes = []\n\n    write_varint(op_bytes, (pos << 4) | (pos ? 2 : 0) | (ins ? 
0 : 4))\n\n    patches.push(22)\n    write_varint(patches, op_bytes.length)\n    patches.push(...op_bytes)\n\n    // write in parents\n    let parents_bytes = []\n\n    write_varint(parents_bytes, 1)\n\n    if (parents[0].length > 1) {\n        for (let [i, [agent, seq]] of parents.entries()) {\n            let has_more = i < parents.length - 1\n            let agent_i = agent_to_i[agent]\n            write_varint(parents_bytes, ((agent_i + 1) << 2) | (has_more ? 2 : 0) | 1)\n            write_varint(parents_bytes, seq)\n        }\n    } else write_varint(parents_bytes, 1)\n\n    patches.push(23)\n    write_varint(patches, parents_bytes.length)\n    patches.push(...parents_bytes)\n\n    // write in patches\n    bytes.push(20)\n    write_varint(bytes, patches.length)\n    bytes.push(...patches)\n\n    //   console.log(bytes);\n    return bytes\n}\n\nfunction OpLog_remote_to_local(doc, frontier) {\n    let map = Object.fromEntries(frontier.map((x) => [x, true]))\n\n    let local_version = []\n    let [agents, versions, parentss] = parseDT([...doc.toBytes()])\n    for (let i = 0; i < versions.length; i++) {\n        if (map[doc.localToRemoteVersion([i])[0].join(\"-\")]) {\n            local_version.push(i)\n        }\n    }\n\n    return frontier.length == local_version.length && new Uint32Array(local_version)\n}\n\nfunction encode_version(agent, seq) {\n    return agent + \"-\" + seq\n}\n\nfunction decode_version(v) {\n    let a = v.split(\"-\")\n    if (a.length > 1) a[1] = parseInt(a[1])\n    return a\n}\n\nfunction v_eq(v1, v2) {\n    return v1.length == v2.length && v1.every((x, i) => x == v2[i])\n}\n\nfunction get_xf_patches(doc, v) {\n    let patches = []\n    for (let xf of doc.xfSince(v)) {\n        patches.push(\n            xf.kind == \"Ins\"\n                ? 
{\n                      unit: \"text\",\n                      range: `[${xf.start}:${xf.start}]`,\n                      content: xf.content,\n                  }\n                : {\n                      unit: \"text\",\n                      range: `[${xf.start}:${xf.end}]`,\n                      content: \"\",\n                  }\n        )\n    }\n    return relative_to_absolute_patches(patches)\n}\n\nfunction relative_to_absolute_patches(patches) {\n    let avl = create_avl_tree((node) => {\n        let parent = node.parent\n        if (parent.left == node) {\n            parent.left_size -= node.left_size + node.size\n        } else {\n            node.left_size += parent.left_size + parent.size\n        }\n    })\n    avl.root.size = Infinity\n    avl.root.left_size = 0\n\n    function resize(node, new_size) {\n        if (node.size == new_size) return\n        let delta = new_size - node.size\n        node.size = new_size\n        while (node.parent) {\n            if (node.parent.left == node) node.parent.left_size += delta\n            node = node.parent\n        }\n    }\n\n    for (let p of patches) {\n        let [start, end] = p.range.match(/\\d+/g).map((x) => 1 * x)\n        let del = end - start\n\n        let node = avl.root\n        while (true) {\n            if (start < node.left_size || (node.left && node.content == null && start == node.left_size)) {\n                node = node.left\n            } else if (start > node.left_size + node.size || (node.content == null && start == node.left_size + node.size)) {\n                start -= node.left_size + node.size\n                node = node.right\n            } else {\n                start -= node.left_size\n                break\n            }\n        }\n\n        let remaining = start + del - node.size\n        if (remaining < 0) {\n            if (node.content == null) {\n                if (start > 0) {\n                    let x = { size: 0, left_size: 0 }\n                    
avl.add(node, \"left\", x)\n                    resize(x, start)\n                }\n                let x = { size: 0, left_size: 0, content: p.content, del }\n                avl.add(node, \"left\", x)\n                resize(x, count_code_points(x.content))\n                resize(node, node.size - (start + del))\n            } else {\n                node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + node.content.slice(codePoints_to_index(node.content, start + del))\n                resize(node, count_code_points(node.content))\n            }\n        } else {\n            let next\n            let middle_del = 0\n            while (remaining >= (next = avl.next(node)).size) {\n                remaining -= next.size\n                middle_del += next.del ?? next.size\n                resize(next, 0)\n                avl.del(next)\n            }\n\n            if (node.content == null) {\n                if (next.content == null) {\n                    if (start == 0) {\n                        node.content = p.content\n                        node.del = node.size + middle_del + remaining\n                        resize(node, count_code_points(node.content))\n                    } else {\n                        let x = {\n                            size: 0,\n                            left_size: 0,\n                            content: p.content,\n                            del: node.size - start + middle_del + remaining,\n                        }\n                        resize(node, start)\n                        avl.add(node, \"right\", x)\n                        resize(x, count_code_points(x.content))\n                    }\n                    resize(next, next.size - remaining)\n                } else {\n                    next.del += node.size - start + middle_del\n                    next.content = p.content + next.content.slice(codePoints_to_index(next.content, remaining))\n                    
resize(node, start)\n                    if (node.size == 0) avl.del(node)\n                    resize(next, count_code_points(next.content))\n                }\n            } else {\n                if (next.content == null) {\n                    node.del += middle_del + remaining\n                    node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content\n                    resize(node, count_code_points(node.content))\n                    resize(next, next.size - remaining)\n                } else {\n                    node.del += middle_del + next.del\n                    node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + next.content.slice(codePoints_to_index(next.content, remaining))\n                    resize(node, count_code_points(node.content))\n                    resize(next, 0)\n                    avl.del(next)\n                }\n            }\n        }\n    }\n\n    let new_patches = []\n    let offset = 0\n    let node = avl.root\n    while (node.left) node = node.left\n    while (node) {\n        if (node.content == null) {\n            offset += node.size\n        } else {\n            new_patches.push({\n                unit: patches[0].unit,\n                range: `[${offset}:${offset + node.del}]`,\n                content: node.content,\n            })\n            offset += node.del\n        }\n\n        node = avl.next(node)\n    }\n    return new_patches\n}\n\nfunction create_avl_tree(on_rotate) {\n    let self = { root: { height: 1 } }\n\n    self.calc_height = (node) => {\n        node.height = 1 + Math.max(node.left?.height ?? 0, node.right?.height ?? 
0)\n    }\n\n    self.rechild = (child, new_child) => {\n        if (child.parent) {\n            if (child.parent.left == child) {\n                child.parent.left = new_child\n            } else {\n                child.parent.right = new_child\n            }\n        } else {\n            self.root = new_child\n        }\n        if (new_child) new_child.parent = child.parent\n    }\n\n    self.rotate = (node) => {\n        on_rotate(node)\n\n        let parent = node.parent\n        let left = parent.right == node ? \"left\" : \"right\"\n        let right = parent.right == node ? \"right\" : \"left\"\n\n        parent[right] = node[left]\n        if (parent[right]) parent[right].parent = parent\n        self.calc_height(parent)\n\n        self.rechild(parent, node)\n        parent.parent = node\n\n        node[left] = parent\n    }\n\n    self.fix_avl = (node) => {\n        self.calc_height(node)\n        let diff = (node.right?.height ?? 0) - (node.left?.height ?? 0)\n        if (Math.abs(diff) >= 2) {\n            if (diff > 0) {\n                if ((node.right.left?.height ?? 0) > (node.right.right?.height ?? 0)) self.rotate(node.right.left)\n                self.rotate((node = node.right))\n            } else {\n                if ((node.left.right?.height ?? 0) > (node.left.left?.height ?? 0)) self.rotate(node.left.right)\n                self.rotate((node = node.left))\n            }\n            self.fix_avl(node)\n        } else if (node.parent) self.fix_avl(node.parent)\n    }\n\n    self.add = (node, side, add_me) => {\n        let other_side = side == \"left\" ? 
\"right\" : \"left\"\n        add_me.height = 1\n\n        if (node[side]) {\n            node = node[side]\n            while (node[other_side]) node = node[other_side]\n            node[other_side] = add_me\n        } else {\n            node[side] = add_me\n        }\n        add_me.parent = node\n        self.fix_avl(node)\n    }\n\n    self.del = (node) => {\n        if (node.left && node.right) {\n            let cursor = node.right\n            while (cursor.left) cursor = cursor.left\n            cursor.left = node.left\n\n            // breaks abstraction\n            cursor.left_size = node.left_size\n            let y = cursor\n            while (y.parent != node) {\n                y = y.parent\n                y.left_size -= cursor.size\n            }\n\n            node.left.parent = cursor\n            if (cursor == node.right) {\n                self.rechild(node, cursor)\n                self.fix_avl(cursor)\n            } else {\n                let x = cursor.parent\n                self.rechild(cursor, cursor.right)\n                cursor.right = node.right\n                node.right.parent = cursor\n                self.rechild(node, cursor)\n                self.fix_avl(x)\n            }\n        } else {\n            self.rechild(node, node.left || node.right || null)\n            if (node.parent) self.fix_avl(node.parent)\n        }\n    }\n\n    self.next = (node) => {\n        if (node.right) {\n            node = node.right\n            while (node.left) node = node.left\n            return node\n        } else {\n            while (node.parent && node.parent.right == node) node = node.parent\n            return node.parent\n        }\n    }\n\n    return self\n}\n\nfunction count_code_points(str) {\n  let code_points = 0;\n  for (let i = 0; i < str.length; i++) {\n    if (str.charCodeAt(i) >= 0xD800 && str.charCodeAt(i) <= 0xDBFF) i++;\n    code_points++;\n  }\n  return code_points;\n}\n\nfunction index_to_codePoints(str, index) {\n  
let i = 0\n  let c = 0\n  while (i < index && i < str.length) {\n    const charCode = str.charCodeAt(i)\n    i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1\n    c++\n  }\n  return c\n}\n\nfunction codePoints_to_index(str, codePoints) {\n  let i = 0\n  let c = 0\n  while (c < codePoints && i < str.length) {\n    const charCode = str.charCodeAt(i)\n    i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1\n    c++\n  }\n  return i\n}\n\nmodule.exports = { simple_d_ton }\n"
  },
  {
    "path": "simple_d_ton/package.json",
    "content": "{\n  \"name\": \"simple_d_ton\",\n  \"version\": \"0.0.24\",\n  \"description\": \"Serve diamond-types and simpleton requests.\",\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org\",\n  \"main\": \"./index.js\",\n  \"dependencies\": {\n    \"diamond-types-node\": \"^1.0.2\",\n    \"braid-http\": \"^0.3.14\"\n  }\n}\n"
  },
  {
    "path": "simpleton/client.js",
    "content": "// requires braid-http@0.3.14\n// \n// url: simpleton resource endpoint\n//\n// apply_remote_update: ({patches, state}) => {...}\n//     this is for incoming changes;\n//     one of these will be non-null,\n//     and can be applied to the current state.\n//\n// generate_local_diff_update: (prev_state) => {...}\n//     this is to generate outgoing changes,\n//     and if there are changes, returns { patches, state }\n//\n// content_type: overrides the Accept and Content-Type headers\n//\n// returns { changed(): (diff_function) => {...} }\n//     this is for outgoing changes;\n//     diff_function = () => ({patches, new_version}).\n//\nfunction simpleton_client(url, { apply_remote_update, generate_local_diff_update, content_type }) {\n    var peer = Math.random().toString(36).substr(2)\n    var current_version = []\n    var prev_state = \"\"\n    var char_counter = -1\n    var outstanding_changes = 0\n    var max_outstanding_changes = 10\n\n    braid_fetch_wrapper(url, {\n        headers: { \"Merge-Type\": \"simpleton\",\n            ...(content_type ? {Accept: content_type} : {}) },\n        subscribe: true,\n        retry: true,\n        parents: () => current_version.length ? 
current_version : null,\n        peer\n    }).then(res =>\n        res.subscribe(update => {\n            // Only accept the update if its parents == our current version\n            update.parents.sort()\n            if (current_version.length === update.parents.length\n                && current_version.every((v, i) => v === update.parents[i])) {\n                current_version = update.version.sort()\n                update.state = update.body\n\n                if (update.patches) {\n                    for (let p of update.patches) p.range = p.range.match(/\\d+/g).map((x) => 1 * x)\n                    update.patches.sort((a, b) => a.range[0] - b.range[0])\n\n                    // convert from code-points to js-indicies\n                    let c = 0\n                    let i = 0\n                    for (let p of update.patches) {\n                        while (c < p.range[0]) {\n                            i += get_char_size(prev_state, i)\n                            c++\n                        }\n                        p.range[0] = i\n\n                        while (c < p.range[1]) {\n                            i += get_char_size(prev_state, i)\n                            c++\n                        }\n                        p.range[1] = i\n                    }\n                }\n\n                prev_state = apply_remote_update(update)\n            }\n        })\n    )\n    \n    return {\n      changed: async () => {\n        if (outstanding_changes >= max_outstanding_changes) return\n        while (true) {\n            var update = generate_local_diff_update(prev_state)\n            if (!update) return   // Stop if there wasn't a change!\n            var {patches, state} = update\n\n            // convert from js-indicies to code-points\n            let c = 0\n            let i = 0\n            for (let p of patches) {\n                while (i < p.range[0]) {\n                    i += get_char_size(prev_state, i)\n                    
c++\n                }\n                p.range[0] = c\n\n                while (i < p.range[1]) {\n                    i += get_char_size(prev_state, i)\n                    c++\n                }\n                p.range[1] = c\n\n                char_counter += p.range[1] - p.range[0]\n                char_counter += count_code_points(p.content)\n\n                p.unit = \"text\"\n                p.range = `[${p.range[0]}:${p.range[1]}]`\n            }\n\n            var version = [peer + \"-\" + char_counter]\n\n            var parents = current_version\n            current_version = version\n            prev_state = state\n\n            outstanding_changes++\n            await braid_fetch_wrapper(url, {\n                headers: { \"Merge-Type\": \"simpleton\",\n                    ...(content_type ? {\"Content-Type\": content_type} : {}) },\n                method: \"PUT\",\n                retry: true,\n                version, parents, patches,\n                peer\n            })\n            outstanding_changes--\n        }\n      }\n    }\n}\n\nfunction get_char_size(s, i) {\n    const charCode = s.charCodeAt(i)\n    return (charCode >= 0xd800 && charCode <= 0xdbff) ? 
2 : 1\n}\n\nfunction count_code_points(str) {\n    let code_points = 0\n    for (let i = 0; i < str.length; i++) {\n        if (str.charCodeAt(i) >= 0xd800 && str.charCodeAt(i) <= 0xdbff) i++\n        code_points++\n    }\n    return code_points\n}\n\nasync function braid_fetch_wrapper(url, params) {\n    if (!params.retry) throw \"wtf\"\n    var waitTime = 10\n    if (params.subscribe) {\n        var subscribe_handler = null\n        connect()\n        async function connect() {\n            try {\n                var c = await braid_fetch(url, { ...params, parents: params.parents?.() })\n                c.subscribe((...args) => subscribe_handler?.(...args), on_error)\n                waitTime = 10\n            } catch (e) {\n                on_error(e)\n            }\n        }\n        function on_error(e) {\n          console.log('eee = ' + e.stack)\n            setTimeout(connect, waitTime)\n            waitTime = Math.min(waitTime * 2, 3000)\n        }\n        return {subscribe: handler => { subscribe_handler = handler }}\n    } else {\n        return new Promise((done) => {\n            send()\n            async function send() {\n                try {\n                    var res = await braid_fetch(url, params)\n                    if (res.status !== 200) throw \"status not 200: \" + res.status\n                    done(res)\n                } catch (e) {\n                    setTimeout(send, waitTime)\n                    waitTime = Math.min(waitTime * 2, 3000)\n                }\n            }\n        })\n    }\n}\n"
  },
  {
    "path": "simpleton/demo.js",
    "content": "console.log(\"v9\")\n\nprocess.on(\"uncaughtException\", (e) => console.log(e.stack))\nprocess.on(\"unhandledRejection\", (e) => console.log(e.stack))\n\nlet simpleton_handle = require(\"./simpleton_lib.js\").handle\n\nvar port = 61870\n\nlet cpu_usage = 0\nif (true) {\n    require(\"child_process\").execSync(`npm install os-utils`, {\n        stdio: \"inherit\",\n    })\n\n    var os = require(\"os-utils\")\n    os.cpuUsage((x) => (cpu_usage = x))\n    setInterval(() => {\n        os.cpuUsage((x) => (cpu_usage = x))\n    }, 1000)\n}\n\nconst server = require(\"http2\").createSecureServer(\n    {\n        key: require(\"fs\").readFileSync(\"./privkey.pem\"),\n        cert: require(\"fs\").readFileSync(\"./fullchain.pem\"),\n        allowHTTP1: true,\n    },\n    async (req, res) => {\n        let silent = req.url == \"//time\"\n\n        if (!silent)\n            console.log(\n                `${req.method} ${req.url} v:${\n                    req.headers[\"Version\"] || req.headers[\"version\"] || \"\"\n                }`\n            )\n\n        res.setHeader(\"Access-Control-Allow-Origin\", \"*\")\n        res.setHeader(\"Access-Control-Allow-Methods\", \"*\")\n        res.setHeader(\"Access-Control-Allow-Headers\", \"*\")\n        res.statusCode = 200\n\n        if (!silent) console.log(\"req.headers: \" + JSON.stringify(req.headers))\n\n        if (req.method == \"OPTIONS\") {\n            return res.end(\"ok\")\n        }\n\n        if (req.method == \"GET\" && req.url == `//time`) {\n            res.setHeader(\"Content-Type\", \"application/json\")\n            return res.end(JSON.stringify({ time: Date.now(), cpu_usage }))\n        }\n\n        return simpleton_handle(req.url, req, res)\n    }\n)\n\nserver.listen(port, () => {\n    console.log(`server started on port ${port}`)\n})\n"
  },
  {
    "path": "simpleton/index.js",
    "content": "module.exports = {\n    create_simpleton_client: require('./client').create_simpleton_client,\n    handle: require('./server').handle\n}\n"
  },
  {
    "path": "simpleton/index.mjs",
    "content": "import client from './client.js'\nimport server from './server.js'\n\nvar create_simpleton_client = client.create_simpleton_client\nvar handle = server.handle\n\nexport { create_simpleton_client, handle }\nexport default { create_simpleton_client, handle }\n"
  },
  {
    "path": "simpleton/package.json",
    "content": "{\n  \"name\": \"simpleton_braid\",\n  \"version\": \"0.2.2\",\n  \"description\": \"An implementation of the simpleton protocol for Node.js and Browsers\",\n  \"author\": \"Braid Working Group\",\n  \"repository\": \"braid-org/braidjs\",\n  \"homepage\": \"https://braid.org\",\n  \"files\": [\n    \"client.js\",\n    \"server.js\",\n    \"index.js\",\n    \"index.mjs\"\n  ],\n  \"main\": \"./index.js\",\n  \"exports\": {\n    \"require\": \"./index.js\",\n    \"import\": \"./index.mjs\"\n  },\n  \"dependencies\": {\n    \"diamond-types-node\": \"^1.0.2\",\n    \"braid-http\": \"^0.3.3\"\n  }\n}\n"
  },
  {
    "path": "simpleton/server.js",
    "content": "\nconsole.log(\"simpleton.js: v163\")\n\nlet { Doc, Branch, OpLog } = require(\"diamond-types-node\")\nlet braidify = require(\"braid-http\").http_server\nlet fs = require(\"fs\")\n\nlet waiting_puts = 0\nlet prev_put_p = null\n\nasync function handle(req, res, options = {}) {\n    options = {\n        db_folder: null,                 // Default db_folder\n        key: req.url.split('?')[0],      // Default key\n        ...options                       // Override with all options passed in\n    }\n\n    let start_time = Date.now()\n\n    let resource = await get_resource(options.key, options.db_folder)\n\n    braidify(req, res)\n\n    let peer = req.headers[\"peer\"]\n    res.my_peer = peer\n\n    res.setHeader(\"Access-Control-Allow-Origin\", \"*\")\n    res.setHeader(\"Access-Control-Allow-Methods\", \"*\")\n    res.setHeader(\"Access-Control-Allow-Headers\", \"*\")\n\n    function my_end(statusCode, x) {\n        res.statusCode = statusCode\n        res.end(x)\n    }\n\n    if (req.method == \"OPTIONS\") return my_end(200)\n\n    if (req.method == \"DELETE\") {\n        await resource.delete_me()\n        return my_end(200, \"\")\n    }\n\n    if ((req.method == \"GET\" || req.method == \"HEAD\") && req.subscribe) {\n        res.setHeader(\"Content-Type\", \"text/plain\")\n        res.setHeader(\"Editable\", \"true\")\n        res.setHeader(\"Merge-Type\", \"simpleton\")\n\n        if (req.method == \"HEAD\") return my_end(200)\n\n        res.startSubscription({\n            onClose: (_) => resource.clients.delete(res),\n        })\n\n        let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n        let x = { version }\n\n        if (!req.parents && !req.version) {\n            x.parents = []\n            x.body = resource.doc.get()\n            res.sendVersion(x)\n        } else {\n            x.parents = req.version ? 
req.version : req.parents\n            res.my_last_seen_version = x.parents\n\n            // only send them a version from these parents if we have these parents (otherwise we'll assume these parents are more recent, probably versions they created but haven't sent us yet, and we'll send them appropriate rebased updates when they send us these versions)\n            let local_version = OpLog_remote_to_local(resource.doc, x.parents)\n            if (local_version) {\n                x.patches = get_xf_patches(resource.doc, local_version)\n                res.sendVersion(x)\n            }\n        }\n\n        res.my_last_sent_version = version\n        resource.clients.add(res)\n        return\n    }\n\n    if ((req.method == \"GET\" || req.method == \"HEAD\") && !req.subscribe) {\n        res.setHeader(\"Accept-Subscribe\", \"true\")\n\n        let doc = resource.doc\n        const buffer = Buffer.from(doc.get(), \"utf8\")\n\n        res.setHeader(\"Content-Type\", \"text/plain\")\n        res.setHeader(\"Content-Length\", buffer.length)\n\n        res.setHeader(\n            \"Version\",\n            doc\n                .getRemoteVersion()\n                .map((x) => encode_version(...x))\n                .map((x) => JSON.stringify(x))\n                .join(\", \")\n        )\n\n        if (req.method == \"HEAD\") return my_end(200)\n\n        return my_end(200, buffer)\n    }\n\n    if (req.method == \"PUT\" || req.method == \"POST\" || req.method == \"PATCH\") {\n        let wait_time = 0\n\n        if (waiting_puts >= 100) {\n            console.log(`The server is busy.`)\n            return my_end(503, \"The server is busy.\")\n        }\n\n        waiting_puts++\n        console.log(`waiting_puts(after++) = ${waiting_puts}`)\n\n        let my_prev_put_p = prev_put_p\n        let done_my_turn = null\n        prev_put_p = new Promise(\n            (done) =>\n            (done_my_turn = (statusCode, x) => {\n                waiting_puts--\n                
console.log(`waiting_puts(after--) = ${waiting_puts}`)\n                x.wait_time = wait_time\n                x.server_time_taken = Date.now() - start_time\n                my_end(statusCode, x)\n                done()\n            })\n        )\n        let patches = await req.patches()\n        await my_prev_put_p\n\n        wait_time = Date.now() - start_time\n        start_time = Date.now()\n\n        let og_patches = patches\n        patches = patches.map((p) => ({\n            ...p,\n            range: p.range.match(/\\d+/g).map((x) => parseInt(x)),\n        }))\n\n        let og_v = req.version[0]\n\n        // reduce the version sequence by the number of char-edits\n        let v = decode_version(og_v)\n        v = encode_version(v[0], v[1] + 1 - patches.reduce((a, b) => a + b.content.length + (b.range[1] - b.range[0]), 0))\n\n        let ps = req.parents\n        if (!ps?.length) ps = [\"root\"]\n\n        let v_before = resource.doc.getLocalVersion()\n        let parents = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n\n        let bytes = []\n\n        let offset = 0\n        for (let p of patches) {\n            // delete\n            for (let i = p.range[0]; i < p.range[1]; i++) {\n                bytes.push(OpLog_create_bytes(v, ps, p.range[1] - 1 + offset, null))\n                offset--\n                ps = [v]\n                v = decode_version(v)\n                v = encode_version(v[0], v[1] + 1)\n            }\n            // insert\n            for (let i = 0; i < p.content?.length ?? 
0; i++) {\n                let c = p.content[i]\n                bytes.push(OpLog_create_bytes(v, ps, p.range[1] + offset, c))\n                offset++\n                ps = [v]\n                v = decode_version(v)\n                v = encode_version(v[0], v[1] + 1)\n            }\n        }\n\n        try {\n            for (let b of bytes) resource.doc.mergeBytes(b)\n        } catch (e) {\n            console.log(`EEE= ${e}:${e.stack}`)\n            // we couldn't apply the version, presumably because we're missing its parents.\n            // we want to send a 4XX error, so the client will resend this request later,\n            // hopefully after we've received the necessary parents.\n\n            // here are some 4XX error code options..\n            //\n            // - 425 Too Early\n            //     - pros: our message is too early\n            //     - cons: associated with some \"Early-Data\" http thing, which we're not using\n            // - 400 Bad Request\n            //     - pros: pretty generic\n            //     - cons: implies client shouldn't resend as-is\n            // - 409 Conflict\n            //     - pros: doesn't imply modifications needed\n            //     - cons: the message is not conflicting with anything\n            // - 412 Precondition Failed\n            //     - pros: kindof true.. the precondition of having another version has failed..\n            //     - cons: not strictly true, as this code is associated with http's If-Unmodified-Since stuff\n            // - 422 Unprocessable Content\n            //     - pros: it's true\n            //     - cons: implies client shouldn't resend as-is (at least, it says that here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422)\n            // - 428 Precondition Required\n            //     - pros: the name sounds right\n            //     - cons: typically implies that the request was missing an http conditional field like If-Match. 
that is to say, it implies that the request is missing a precondition, not that the server is missing a precondition\n            return done_my_turn(425, \"The server is missing the parents of this version.\")\n        }\n\n        resource.need_defrag = true\n\n        let v_after = resource.doc.getLocalVersion()\n        if (JSON.stringify(v_before) === JSON.stringify(v_after)) {\n            console.log(`we got a version we already had: ${v_before}`)\n            return done_my_turn(200, \"\")\n        }\n\n        await resource.db_delta(resource.doc.getPatchSince(v_before))\n\n        patches = get_xf_patches(resource.doc, v_before)\n        console.log(JSON.stringify({ patches }))\n\n        let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n\n        for (let client of resource.clients) {\n            if (client.my_peer == peer) {\n                client.my_last_seen_version = [og_v]\n            }\n\n            function set_timeout(time_override) {\n                if (client.my_timeout) clearTimeout(client.my_timeout)\n                client.my_timeout = setTimeout(() => {\n                    let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))\n                    let x = { version }\n                    x.parents = client.my_last_seen_version\n\n                    console.log(\"rebasing after timeout.. \")\n                    console.log(\"    client.my_unused_version_count = \" + client.my_unused_version_count)\n                    x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))\n\n                    console.log(`sending from rebase: ${JSON.stringify(x)}`)\n                    client.sendVersion(x)\n                    client.my_last_sent_version = x.version\n\n                    delete client.my_timeout\n                }, time_override ?? 
Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))\n            }\n\n            if (client.my_timeout) {\n                if (client.my_peer == peer) {\n                    if (!v_eq(client.my_last_sent_version, req.parents)) {\n                        // note: we don't add to client.my_unused_version_count,\n                        // because we're already in a timeout;\n                        // we'll just extend it here..\n                        set_timeout()\n                    } else {\n                        // hm.. it appears we got a correctly parented version,\n                        // which suggests that maybe we can stop the timeout early\n                        set_timeout(0)\n                    }\n                }\n                continue\n            }\n\n            let x = { version }\n            if (client.my_peer == peer) {\n                if (!v_eq(client.my_last_sent_version, req.parents)) {\n                    client.my_unused_version_count = (client.my_unused_version_count ?? 
0) + 1\n                    set_timeout()\n                    continue\n                } else {\n                    delete client.my_unused_version_count\n                }\n\n                x.parents = req.version\n                if (!v_eq(version, req.version)) {\n                    console.log(\"rebasing..\")\n                    x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))\n                } else {\n                    // this client already has this version,\n                    // so let's pretend to send it back, but not\n                    console.log(`not reflecting back to simpleton`)\n                    client.my_last_sent_version = x.version\n                    continue\n                }\n            } else {\n                x.parents = parents\n                x.patches = patches\n            }\n            console.log(`sending: ${JSON.stringify(x)}`)\n            client.sendVersion(x)\n            client.my_last_sent_version = x.version\n        }\n\n\n        return done_my_turn(200, \"\")\n    }\n\n    throw new Error(\"unknown\")\n}\n\nasync function get_resource(key, db_folder) {\n    let cache = get_resource.cache || (get_resource.cache = {})\n    if (cache[key]) return cache[key]\n\n    let resource = {}\n    resource.clients = new Set()\n    resource.simpleton_clients = new Set()\n\n    resource.doc = new Doc(\"server\")\n\n    let { change, delete_me } = db_folder\n        ? 
await file_sync(\n            db_folder,\n            encodeURIComponent(key),\n            (bytes) => resource.doc.mergeBytes(bytes),\n            () => resource.doc.toBytes()\n        )\n        : { change: () => { }, delete_me: () => { } }\n\n    resource.db_delta = change\n\n    resource.doc = defrag_dt(resource.doc)\n    resource.need_defrag = false\n\n    resource.delete_me = () => {\n        delete_me()\n        delete cache[key]\n    }\n\n    return (cache[key] = resource)\n}\n\nasync function file_sync(db_folder, filename_base, process_delta, get_init) {\n    let currentNumber = 0\n    let currentSize = 0\n    let threshold = 0\n\n    // Ensure the existence of db_folder\n    try {\n        await fs.promises.access(db_folder);\n    } catch (err) {\n        if (err.code === 'ENOENT') {\n            await fs.promises.mkdir(db_folder, { recursive: true });\n        } else {\n            throw err;\n        }\n    }\n\n    // Read existing files and sort by numbers.\n    async function get_sorted_files() {\n        let re = new RegExp(\"^\" + filename_base.replace(/[^a-zA-Z0-9]/g, \"\\\\$&\") + \"\\\\.\\\\d+$\")\n        return (await fs.promises.readdir(db_folder))\n            .filter((a) => re.test(a))\n            .sort((a, b) => parseInt(a.match(/\\d+$/)[0]) - parseInt(b.match(/\\d+$/)[0]))\n            .map((a) => `${db_folder}/${a}`)\n    }\n\n    const files = await get_sorted_files()\n\n    // Try to process files starting from the highest number.\n    let done = false\n    for (let i = files.length - 1; i >= 0; i--) {\n        if (done) {\n            await fs.promises.unlink(files[i])\n            continue\n        }\n        try {\n            const filename = files[i]\n            console.log(`trying to process file: ${filename}`)\n            const data = await fs.promises.readFile(filename)\n\n            let cursor = 0\n            let isFirstChunk = true\n            while (cursor < data.length) {\n                const chunkSize = 
data.readUInt32LE(cursor)\n                cursor += 4\n                const chunk = data.slice(cursor, cursor + chunkSize)\n                cursor += chunkSize\n\n                if (isFirstChunk) {\n                    isFirstChunk = false\n                    threshold = chunkSize * 10\n                }\n                process_delta(chunk)\n            }\n\n            currentSize = data.length\n            currentNumber = parseInt(filename.match(/\\d+$/)[0])\n            done = true\n        } catch (error) {\n            console.error(`Error processing file: ${files[i]}`)\n            await fs.promises.unlink(files[i])\n        }\n    }\n\n    return {\n        change: async (bytes) => {\n            currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32\n            const filename = `${db_folder}/${filename_base}.${currentNumber}`\n            if (currentSize < threshold) {\n                console.log(`appending to db..`)\n\n                let buffer = Buffer.allocUnsafe(4)\n                buffer.writeUInt32LE(bytes.length, 0)\n                await fs.promises.appendFile(filename, buffer)\n                await fs.promises.appendFile(filename, bytes)\n\n                console.log(\"wrote to : \" + filename)\n            } else {\n                try {\n                    console.log(`starting new db..`)\n\n                    currentNumber++\n                    const init = get_init()\n                    const buffer = Buffer.allocUnsafe(4)\n                    buffer.writeUInt32LE(init.length, 0)\n\n                    const newFilename = `${db_folder}/${filename_base}.${currentNumber}`\n                    await fs.promises.writeFile(newFilename, buffer)\n                    await fs.promises.appendFile(newFilename, init)\n\n                    console.log(\"wrote to : \" + newFilename)\n\n                    currentSize = 4 + init.length\n                    threshold = currentSize * 10\n                    try {\n        
                await fs.promises.unlink(filename)\n                    } catch (e) { }\n                } catch (e) {\n                    console.log(`e = ${e.stack}`)\n                }\n            }\n        },\n        delete_me: async () => {\n            await Promise.all(\n                (\n                    await get_sorted_files()\n                ).map((file) => {\n                    return new Promise((resolve, reject) => {\n                        fs.unlink(file, (err) => {\n                            if (err) {\n                                console.error(`Error deleting file: ${file}`)\n                                reject(err)\n                            } else {\n                                console.log(`Deleted file: ${file}`)\n                                resolve()\n                            }\n                        })\n                    })\n                })\n            )\n        },\n    }\n}\n\n//////////////////////////////////////////////////////////////////\n//////////////////////////////////////////////////////////////////\n//////////////////////////////////////////////////////////////////\n\nfunction defrag_dt(doc) {\n    let fresh_doc = new Doc(\"server\")\n    fresh_doc.mergeBytes(doc.toBytes())\n    return fresh_doc\n}\n\nfunction parseDT(byte_array) {\n    if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== \"DMNDTYPS\") throw new Error(\"dt parse error, expected DMNDTYPS\")\n\n    if (byte_array.shift() != 0) throw new Error(\"dt parse error, expected version 0\")\n\n    let agents = []\n    let versions = []\n    let parentss = []\n\n    while (byte_array.length) {\n        let id = byte_array.shift()\n        let len = read_varint(byte_array)\n        if (id == 1) {\n        } else if (id == 3) {\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                agents.push(read_string(byte_array))\n            }\n        } else if (id == 
20) {\n        } else if (id == 21) {\n            let seqs = {}\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                let part0 = read_varint(byte_array)\n                let has_jump = part0 & 1\n                let agent_i = (part0 >> 1) - 1\n                let run_length = read_varint(byte_array)\n                let jump = 0\n                if (has_jump) {\n                    let part2 = read_varint(byte_array)\n                    jump = part2 >> 1\n                    if (part2 & 1) jump *= -1\n                }\n                let base = (seqs[agent_i] || 0) + jump\n\n                for (let i = 0; i < run_length; i++) {\n                    versions.push([agents[agent_i], base + i])\n                }\n                seqs[agent_i] = base + run_length\n            }\n        } else if (id == 23) {\n            let count = 0\n            let goal = byte_array.length - len\n            while (byte_array.length > goal) {\n                let run_len = read_varint(byte_array)\n\n                let parents = []\n                let has_more = 1\n                while (has_more) {\n                    let x = read_varint(byte_array)\n                    let is_foreign = 0x1 & x\n                    has_more = 0x2 & x\n                    let num = x >> 2\n\n                    if (x == 1) {\n                        parents.push([\"root\"])\n                    } else if (!is_foreign) {\n                        parents.push(versions[count - num])\n                    } else {\n                        parents.push([agents[num - 1], read_varint(byte_array)])\n                    }\n                }\n                parentss.push(parents)\n                count++\n\n                for (let i = 0; i < run_len - 1; i++) {\n                    parentss.push([versions[count - 1]])\n                    count++\n                }\n            }\n        } else {\n            byte_array.splice(0, len)\n     
   }\n    }\n\n    function read_string(byte_array) {\n        return new TextDecoder().decode(new Uint8Array(byte_array.splice(0, read_varint(byte_array))))\n    }\n\n    function read_varint(byte_array) {\n        let result = 0\n        let shift = 0\n        while (true) {\n            if (byte_array.length === 0)\n                throw new Error(\"byte array does not contain varint\")\n\n            let byte_val = byte_array.shift()\n            result |= (byte_val & 0x7f) << shift\n            if ((byte_val & 0x80) == 0) return result\n            shift += 7\n        }\n    }\n\n    return [agents, versions, parentss]\n}\n\nfunction OpLog_create_bytes(version, parents, pos, ins) {\n    function write_varint(bytes, value) {\n        while (value >= 0x80) {\n            bytes.push((value & 0x7f) | 0x80)\n            value >>= 7\n        }\n        bytes.push(value)\n    }\n\n    function write_string(byte_array, str) {\n        let str_bytes = new TextEncoder().encode(str)\n        write_varint(byte_array, str_bytes.length)\n        byte_array.push(...str_bytes)\n    }\n\n    version = decode_version(version)\n    parents = parents.map(decode_version)\n\n    let bytes = []\n    bytes = bytes.concat(Array.from(new TextEncoder().encode(\"DMNDTYPS\")))\n    bytes.push(0)\n\n    let file_info = []\n    let agent_names = []\n\n    let agents = new Set()\n    agents.add(version[0])\n    for (let p of parents) if (p.length > 1) agents.add(p[0])\n    agents = [...agents]\n\n    //   console.log(JSON.stringify({ agents, parents }, null, 4));\n\n    let agent_to_i = {}\n    for (let [i, agent] of agents.entries()) {\n        agent_to_i[agent] = i\n        write_string(agent_names, agent)\n    }\n\n    file_info.push(3)\n    write_varint(file_info, agent_names.length)\n    file_info.push(...agent_names)\n\n    bytes.push(1)\n    write_varint(bytes, file_info.length)\n    bytes.push(...file_info)\n\n    let branch = []\n\n    if (parents[0]?.length > 1) {\n        let 
frontier = []\n\n        for (let [i, [agent, seq]] of parents.entries()) {\n            let has_more = i < parents.length - 1\n            let mapped = agent_to_i[agent]\n            let n = ((mapped + 1) << 1) | (has_more ? 1 : 0)\n            write_varint(frontier, n)\n            write_varint(frontier, seq)\n        }\n\n        branch.push(12)\n        write_varint(branch, frontier.length)\n        branch.push(...frontier)\n    }\n\n    bytes.push(10)\n    write_varint(bytes, branch.length)\n    bytes.push(...branch)\n\n    let patches = []\n\n    if (ins) {\n        let inserted_content_bytes = []\n\n        inserted_content_bytes.push(0) // ins (not del, which is 1)\n\n        inserted_content_bytes.push(13) // \"content\" enum (rather than compressed)\n\n        let encoder = new TextEncoder()\n        let utf8Bytes = encoder.encode(ins)\n\n        inserted_content_bytes.push(1 + utf8Bytes.length) // length of content chunk\n        inserted_content_bytes.push(4) // \"plain text\" enum\n\n        for (let b of utf8Bytes) inserted_content_bytes.push(b) // actual text\n\n        inserted_content_bytes.push(25) // \"known\" enum\n        inserted_content_bytes.push(1) // length of \"known\" chunk\n        inserted_content_bytes.push(3) // content of length 1, and we \"know\" it\n\n        patches.push(24)\n        write_varint(patches, inserted_content_bytes.length)\n        patches.push(...inserted_content_bytes)\n    }\n\n    // write in the version\n    let version_bytes = []\n\n    let [agent, seq] = version\n    let agent_i = agent_to_i[agent]\n    let jump = seq\n\n    write_varint(version_bytes, ((agent_i + 1) << 1) | (jump != 0 ? 
1 : 0))\n    write_varint(version_bytes, 1)\n    if (jump) write_varint(version_bytes, jump << 1)\n\n    patches.push(21)\n    write_varint(patches, version_bytes.length)\n    patches.push(...version_bytes)\n\n    // write in \"op\" bytes (some encoding of position)\n    let op_bytes = []\n\n    write_varint(op_bytes, (pos << 4) | (pos ? 2 : 0) | (ins ? 0 : 4))\n\n    patches.push(22)\n    write_varint(patches, op_bytes.length)\n    patches.push(...op_bytes)\n\n    // write in parents\n    let parents_bytes = []\n\n    write_varint(parents_bytes, 1)\n\n    if (parents[0]?.length > 1) {\n        for (let [i, [agent, seq]] of parents.entries()) {\n            let has_more = i < parents.length - 1\n            let agent_i = agent_to_i[agent]\n            write_varint(\n                parents_bytes,\n                ((agent_i + 1) << 2) | (has_more ? 2 : 0) | 1\n            )\n            write_varint(parents_bytes, seq)\n        }\n    } else write_varint(parents_bytes, 1)\n\n    patches.push(23)\n    write_varint(patches, parents_bytes.length)\n    patches.push(...parents_bytes)\n\n    // write in patches\n    bytes.push(20)\n    write_varint(bytes, patches.length)\n    bytes.push(...patches)\n\n    //   console.log(bytes);\n\n    return bytes\n}\n\nfunction OpLog_remote_to_local(doc, frontier) {\n    let map = Object.fromEntries(frontier.map((x) => [x, true]))\n\n    let local_version = []\n    let [agents, versions, parentss] = parseDT([...doc.toBytes()])\n    for (let i = 0; i < versions.length; i++) {\n        if (map[doc.localToRemoteVersion([i])[0].join(\"-\")]) {\n            local_version.push(i)\n        }\n    }\n\n    return (\n        frontier.length == local_version.length &&\n        new Uint32Array(local_version)\n    )\n}\n\nfunction encode_version(agent, seq) {\n    return agent + \"-\" + seq\n}\n\nfunction decode_version(v) {\n    let a = v.split(\"-\")\n    if (a.length > 1) a[1] = parseInt(a[1])\n    return a\n}\n\nfunction v_eq(v1, v2) {\n    
return v1.length == v2.length && v1.every((x, i) => x == v2[i])\n}\n\nfunction get_xf_patches(doc, v) {\n    let patches = []\n    for (let xf of doc.xfSince(v)) {\n        patches.push(\n            xf.kind == \"Ins\"\n                ? {\n                    unit: \"text\",\n                    range: `[${xf.start}:${xf.start}]`,\n                    content: xf.content,\n                }\n                : {\n                    unit: \"text\",\n                    range: `[${xf.start}:${xf.end}]`,\n                    content: \"\",\n                }\n        )\n    }\n    return relative_to_absolute_patches(patches)\n}\n\nfunction relative_to_absolute_patches(patches) {\n    let avl = create_avl_tree((node) => {\n        let parent = node.parent\n        if (parent.left == node) {\n            parent.left_size -= node.left_size + node.size\n        } else {\n            node.left_size += parent.left_size + parent.size\n        }\n    })\n    avl.root.size = Infinity\n    avl.root.left_size = 0\n\n    function resize(node, new_size) {\n        if (node.size == new_size) return\n        let delta = new_size - node.size\n        node.size = new_size\n        while (node.parent) {\n            if (node.parent.left == node) node.parent.left_size += delta\n            node = node.parent\n        }\n    }\n\n    for (let p of patches) {\n        let [start, end] = p.range.match(/\\d+/g).map((x) => 1 * x)\n        let del = end - start\n\n        let node = avl.root\n        while (true) {\n            if (\n                start < node.left_size ||\n                (node.left && node.content == null && start == node.left_size)\n            ) {\n                node = node.left\n            } else if (\n                start > node.left_size + node.size ||\n                (node.content == null && start == node.left_size + node.size)\n            ) {\n                start -= node.left_size + node.size\n                node = node.right\n            } else {\n       
         start -= node.left_size\n                break\n            }\n        }\n\n        let remaining = start + del - node.size\n        if (remaining < 0) {\n            if (node.content == null) {\n                if (start > 0) {\n                    let x = { size: 0, left_size: 0 }\n                    avl.add(node, \"left\", x)\n                    resize(x, start)\n                }\n                let x = { size: 0, left_size: 0, content: p.content, del }\n                avl.add(node, \"left\", x)\n                resize(x, x.content.length)\n                resize(node, node.size - (start + del))\n            } else {\n                node.content =\n                    node.content.slice(0, start) +\n                    p.content +\n                    node.content.slice(start + del)\n                resize(node, node.content.length)\n            }\n        } else {\n            let next\n            let middle_del = 0\n            while (remaining >= (next = avl.next(node)).size) {\n                remaining -= next.size\n                middle_del += next.del ?? 
next.size\n                resize(next, 0)\n                avl.del(next)\n            }\n\n            if (node.content == null) {\n                if (next.content == null) {\n                    if (start == 0) {\n                        node.content = p.content\n                        node.del = node.size + middle_del + remaining\n                        resize(node, node.content.length)\n                    } else {\n                        let x = {\n                            size: 0,\n                            left_size: 0,\n                            content: p.content,\n                            del: node.size - start + middle_del + remaining,\n                        }\n                        resize(node, start)\n                        avl.add(node, \"right\", x)\n                        resize(x, x.content.length)\n                    }\n                    resize(next, next.size - remaining)\n                } else {\n                    next.del += node.size - start + middle_del\n                    next.content = p.content + next.content.slice(remaining)\n                    resize(node, start)\n                    if (node.size == 0) avl.del(node)\n                    resize(next, next.content.length)\n                }\n            } else {\n                if (next.content == null) {\n                    node.del += middle_del + remaining\n                    node.content = node.content.slice(0, start) + p.content\n                    resize(node, node.content.length)\n                    resize(next, next.size - remaining)\n                } else {\n                    node.del += middle_del + next.del\n                    node.content =\n                        node.content.slice(0, start) +\n                        p.content +\n                        next.content.slice(remaining)\n                    resize(node, node.content.length)\n                    resize(next, 0)\n                    avl.del(next)\n                }\n           
 }\n        }\n    }\n\n    let new_patches = []\n    let offset = 0\n    let node = avl.root\n    while (node.left) node = node.left\n    while (node) {\n        if (node.content == null) {\n            offset += node.size\n        } else {\n            new_patches.push({\n                unit: patches[0].unit,\n                range: `[${offset}:${offset + node.del}]`,\n                content: node.content,\n            })\n            offset += node.del\n        }\n\n        node = avl.next(node)\n    }\n    return new_patches\n}\n\nfunction create_avl_tree(on_rotate) {\n    let self = { root: { height: 1 } }\n\n    self.calc_height = (node) => {\n        node.height =\n            1 + Math.max(node.left?.height ?? 0, node.right?.height ?? 0)\n    }\n\n    self.rechild = (child, new_child) => {\n        if (child.parent) {\n            if (child.parent.left == child) {\n                child.parent.left = new_child\n            } else {\n                child.parent.right = new_child\n            }\n        } else {\n            self.root = new_child\n        }\n        if (new_child) new_child.parent = child.parent\n    }\n\n    self.rotate = (node) => {\n        on_rotate(node)\n\n        let parent = node.parent\n        let left = parent.right == node ? \"left\" : \"right\"\n        let right = parent.right == node ? \"right\" : \"left\"\n\n        parent[right] = node[left]\n        if (parent[right]) parent[right].parent = parent\n        self.calc_height(parent)\n\n        self.rechild(parent, node)\n        parent.parent = node\n\n        node[left] = parent\n    }\n\n    self.fix_avl = (node) => {\n        self.calc_height(node)\n        let diff = (node.right?.height ?? 0) - (node.left?.height ?? 0)\n        if (Math.abs(diff) >= 2) {\n            if (diff > 0) {\n                if (\n                    (node.right.left?.height ?? 0) >\n                    (node.right.right?.height ?? 
0)\n                )\n                    self.rotate(node.right.left)\n                self.rotate((node = node.right))\n            } else {\n                if (\n                    (node.left.right?.height ?? 0) >\n                    (node.left.left?.height ?? 0)\n                )\n                    self.rotate(node.left.right)\n                self.rotate((node = node.left))\n            }\n            self.fix_avl(node)\n        } else if (node.parent) self.fix_avl(node.parent)\n    }\n\n    self.add = (node, side, add_me) => {\n        let other_side = side == \"left\" ? \"right\" : \"left\"\n        add_me.height = 1\n\n        if (node[side]) {\n            node = node[side]\n            while (node[other_side]) node = node[other_side]\n            node[other_side] = add_me\n        } else {\n            node[side] = add_me\n        }\n        add_me.parent = node\n        self.fix_avl(node)\n    }\n\n    self.del = (node) => {\n        if (node.left && node.right) {\n            let cursor = node.right\n            while (cursor.left) cursor = cursor.left\n            cursor.left = node.left\n\n            // breaks abstraction\n            cursor.left_size = node.left_size\n            let y = cursor\n            while (y.parent != node) {\n                y = y.parent\n                y.left_size -= cursor.size\n            }\n\n            node.left.parent = cursor\n            if (cursor == node.right) {\n                self.rechild(node, cursor)\n                self.fix_avl(cursor)\n            } else {\n                let x = cursor.parent\n                self.rechild(cursor, cursor.right)\n                cursor.right = node.right\n                node.right.parent = cursor\n                self.rechild(node, cursor)\n                self.fix_avl(x)\n            }\n        } else {\n            self.rechild(node, node.left || node.right || null)\n            if (node.parent) self.fix_avl(node.parent)\n        }\n    }\n\n    self.next = 
(node) => {\n        if (node.right) {\n            node = node.right\n            while (node.left) node = node.left\n            return node\n        } else {\n            while (node.parent && node.parent.right == node) node = node.parent\n            return node.parent\n        }\n    }\n\n    return self\n}\n\nmodule.exports = { handle }\n"
  },
  {
    "path": "sync9/old-vis/visualization.html",
    "content": "<script src=\"../../util/require.js\"></script>\n<script src=\"../../util/utilities.js\"></script>\n<script src=\"../../sync9/sync9.js\"></script>\n<script src=\"../../kernel/antimatter.js\"></script>\n<script src=\"../../kernel/errors.js\"></script>\n<script src=\"../../kernel/node.js\"></script>\n<script src=\"../../kernel/pipe.js\"></script>\n<script src=\"../../kernel/test/virtual-p2p.js\"></script>\n<script src=\"visualization.js\"></script>\n<body></body>\n<script src=\"../../kernel/test/tests.js\" charset=\"UTF-8\"></script>\n"
  },
  {
    "path": "sync9/old-vis/visualization.js",
    "content": "module.exports = require.visualization = function create_vis(sim) {\n    var tau = Math.PI*2\n    var debug_frames = []\n    var add_frame = (f) => debug_frames && debug_frames.push(f)\n    var vis = {loop, add_frame}\n\n    var a = document.createElement('div')\n    a.style.display = 'grid'\n    a.style['grid-template-rows'] = '1fr 20px'\n    a.style.width = '100%'\n    a.style.height = '100%'\n    document.body.append(a)\n    \n    var c = document.createElement('canvas')\n    c.width = 1000 * devicePixelRatio\n    c.height = (window.innerHeight - 20) * devicePixelRatio\n    c.style.width = (c.width / devicePixelRatio) + 'px'\n    c.style.height = (c.height / devicePixelRatio) + 'px'\n    var g = c.getContext('2d')\n    a.append(c)\n    \n    // var top_part = document.createElement('div')\n    // a.append(top_part)\n    \n    var slider = document.createElement('input')\n    slider.style.width = '50%'\n    slider.setAttribute('type', 'range')\n    slider.setAttribute('min', '0')\n    slider.setAttribute('max', debug_frames.length - 1)\n    slider.setAttribute('value', debug_frames.length - 1)\n    slider.oninput = () => {\n        is_on = false\n        draw_frame(1*slider.value, 0)\n    }\n    a.append(slider)\n\n    c.addEventListener('mousedown', () => {\n        is_on = !is_on\n    })\n\n    var loop_count = 0\n    var loop_inbetween_count = 0\n    \n    var is_on = true\n    function loop() {\n        if (is_on) {\n            if (loop_inbetween_count == 0) {\n                try {\n                    step(loop_count)\n                } catch (e) {\n                    console.log('e:', e)\n                    console.log('error on loop_count = ' + loop_count)\n                    throw 'stop'\n                }\n                loop_count++\n            }\n\n            if (debug_frames.length > 1)\n                draw_frame(debug_frames.length - 2, loop_inbetween_count / 10)\n            if (debug_frames.length > 300) debug_frames = 
debug_frames.slice(100)\n            \n            slider.setAttribute('max', debug_frames.length - 2)\n            slider.value = debug_frames.length - 2\n\n            loop_inbetween_count = (loop_inbetween_count + 1) % 1\n        }\n        setTimeout(loop, 30)\n    }\n\n    function draw_frame(di, percent) {\n        if (di == null) di = debug_frames.length - 1\n        var d = debug_frames[di]\n        \n        g.clearRect(0, 0, c.width, c.height)\n        \n        draw_network(c, g, debug_frames, di, percent, 0, 0, 800, 800, 300)\n        sim.peers.forEach((p, i) => {\n            p = d.peers[i]\n            var x = 800\n            var y = 20 + 450*i\n            var r = 10\n\n            if (p.resources.my_key) {\n                draw_fissure_dag(c, g, debug_frames, di, i, x, y, 100, 300, r)\n                \n                draw_time_dag(c, g, debug_frames, di, i, x + 100, y, 300, 300, r)\n\n                var v = p.resources.my_key.space_dag\n                var S = null\n\n                if (v && v.t == 'val') v = space_dag_get(v.S, 0)\n                if (v && v.t == 'lit') v = v.S\n                if (typeof(v) == 'string') S = create_space_dag_node(null, v)\n                if (v && v.t == 'str') S = v.S\n                if (S) draw_space_dag(p, g, S, x + 400, y)\n            }\n        })\n        \n        draw_text(c, g, 'f# = ' + d.frame_num + ' + ' + percent, 0, 0, 'grey', 'left', 'top')\n\n        // top_part.innerHTML = ''\n        // top_part.style.display = 'grid'\n        // top_part.style['grid-template-columns'] = '1fr 1fr 1fr'\n        // sim.peers.forEach((p, i) => {\n        //     p = d.peers[i]\n        //     var dd = document.createElement('textarea')\n        //     dd.value = '= ' + (p.keys.my_key ? 
JSON.stringify(sync9_read(p.keys.my_key.s9)) : 'n/a') + '\\n\\n' + JSON.stringify(p, null, '    ')\n        //     top_part.append(dd)\n        // })        \n    }\n    \n\n    function draw_text(c, g, text, x, y, color, x_align, y_align, font) {\n        g.font = font || '15px Arial'\n        if (color) g.fillStyle = color\n        g.textAlign = x_align || 'left'\n        g.textBaseline = y_align || 'middle'\n        g.fillText(text, x, y)\n    }\n\n    function draw_network(c, g, frames, fi, percent, x, y, w, h, r) {\n        var peers = frames[fi].peers\n        \n        g.beginPath()\n        g.lineWidth = 0.5\n        g.strokeStyle = 'red'\n        g.rect(x, y, w, h)\n        g.stroke()\n        g.beginPath()\n        g.arc(x + w/2, y + h/2, r, 0, tau)\n        g.stroke()\n        \n        var plank = w/30\n        \n        for (var i = 0; i < peers.length; i++) {\n            for (var ii = i + 1; ii < peers.length; ii++) {\n                var a = tau / peers.length * i\n                var aa = tau / peers.length * ii\n                \n                var p = peers[i]\n                var other_p = peers[ii]\n\n                var connected = Object.keys(p.connected_to).some(pid => pid == other_p.pid) || Object.keys(other_p.connected_to).some(pid => pid == p.pid)\n\n                if (connected) {\n                    g.beginPath()\n                    g.strokeStyle = 'darkgrey'\n                    g.lineWidth = w/30\n                    g.moveTo(x + w/2 + Math.cos(a)*r, y + h/2 + Math.sin(a)*r)\n                    g.lineTo(x + w/2 + Math.cos(aa)*r, y + h/2 + Math.sin(aa)*r)\n                    g.stroke()\n                }\n                \n                function func(i, ii, m, a, aa) {\n                    if (m[0] != peers[ii].pid) return\n                    \n                    var before_frame = fi\n                    while ((before_frame >= 0) && frames[before_frame].peers[i].incoming.some(mm => mm[2] == m[2])) before_frame--\n           
         \n                    var after_frame = fi\n                    while ((after_frame < frames.length) && frames[after_frame].peers[i].incoming.some(mm => mm[2] == m[2])) after_frame++\n\n                    var p1 = [x + w/2 + Math.cos(a)*r, y + h/2 + Math.sin(a)*r]\n                    var p2 = [x + w/2 + Math.cos(aa)*r, y + h/2 + Math.sin(aa)*r]\n                    \n                    var f = lerp(before_frame, 0, after_frame, 1, fi + percent)\n                    var pos = lerp(0, p1, 1, p2, f)\n                    \n                    if (m[3] == 'hello') {\n                        g.save()\n                        g.translate(pos[0], pos[1])\n                        g.rotate(Math.atan2(p2[1] - p1[1], p2[0] - p1[0]) + tau/4)\n                        draw_text(c, g, 'H', 0, 0, 'white', 'center', 'middle')\n                        g.restore()\n\n                        g.beginPath()\n                        var rot_by = tau/2 - (23.5 * tau/360)\n                        var forward = norm(sub(p2, pos))\n                        var t0 = add(pos, mul(forward, w/30*8/10))\n                        var len = (w/30 / 2) / Math.sin(23.5 * tau/360)\n                        var t1 = add(t0, mul(rot(forward, rot_by), len))\n                        var t2 = add(t0, mul(rot(forward, -rot_by), len))\n                        g.moveTo(t1[0], t1[1])\n                        g.lineTo(t0[0], t0[1])\n                        g.lineTo(t2[0], t2[1])\n                        g.lineWidth = 1\n                        g.strokeStyle = 'white'\n                        g.stroke()\n                        \n                        g.beginPath()\n                        var rot_by = tau/8\n                        var t0 = add(pos, mul(forward, -w/30 * 0.45))\n                        var len = (w/30 / 2) / Math.sin(tau/8)\n                        var t1 = add(t0, mul(rot(forward, rot_by), len))\n                        var t2 = add(t0, mul(rot(forward, -rot_by), len))\n               
         g.moveTo(t1[0], t1[1])\n                        g.lineTo(t0[0], t0[1])\n                        g.lineTo(t2[0], t2[1])\n                        g.lineWidth = 2\n                        g.strokeStyle = 'white'\n                        g.stroke()                    \n                    } else if (m[3] == 'get') {\n                        g.save()\n                        g.translate(pos[0], pos[1])\n                        g.rotate(Math.atan2(p2[1] - p1[1], p2[0] - p1[0]) + tau/4)\n                        draw_text(c, g, 'G', 0, 0, 'white', 'center', 'middle')\n                        g.restore()\n\n                        g.beginPath()\n                        var rot_by = tau/2 - (23.5 * tau/360)\n                        var forward = norm(sub(p2, pos))\n                        var t0 = add(pos, mul(forward, w/30*8/10))\n                        var len = (w/30 / 2) / Math.sin(23.5 * tau/360)\n                        var t1 = add(t0, mul(rot(forward, rot_by), len))\n                        var t2 = add(t0, mul(rot(forward, -rot_by), len))\n                        g.moveTo(t1[0], t1[1])\n                        g.lineTo(t0[0], t0[1])\n                        g.lineTo(t2[0], t2[1])\n                        g.lineWidth = 1\n                        g.strokeStyle = 'white'\n                        g.stroke()\n                        \n                        g.beginPath()\n                        var rot_by = tau/8\n                        var t0 = add(pos, mul(forward, -w/30 * 0.45))\n                        var len = (w/30 / 2) / Math.sin(tau/8)\n                        var t1 = add(t0, mul(rot(forward, rot_by), len))\n                        var t2 = add(t0, mul(rot(forward, -rot_by), len))\n                        g.moveTo(t1[0], t1[1])\n                        g.lineTo(t0[0], t0[1])\n                        g.lineTo(t2[0], t2[1])\n                        g.lineWidth = 2\n                        g.strokeStyle = 'white'\n                        g.stroke()\n  
                  } else if (m[3] == 'welcome') {\n                        var rr = plank*0.5\n                        for (var a = 0; a < 5; a++) {\n                            g.beginPath()\n                            g.arc(pos[0] + Math.cos(tau/5*a)*rr, pos[1] + Math.sin(tau/5*a)*rr, plank * 0.35, 0, tau)\n                            g.fillStyle = m[4].unack_boundary ? 'lightblue' : 'white'\n                            g.fill()\n                            \n                            g.beginPath()\n                            g.arc(pos[0] + Math.cos(tau/5*a)*rr, pos[1] + Math.sin(tau/5*a)*rr, plank * 0.35, 0, tau)\n                            g.lineWidth = 1\n                            g.strokeStyle = 'blue'\n                            g.stroke()\n                        }\n                    } else if (m[3] == 'set') {\n                        g.beginPath()\n                        g.arc(pos[0], pos[1], plank * 0.7, 0, tau)\n                        g.fillStyle = 'white'\n                        g.fill()\n                        \n                        var my_text = m[4].version\n                        draw_text(c, g, my_text, pos[0], pos[1], 'blue', 'center', 'middle')\n                        \n                        g.beginPath()\n                        g.arc(pos[0], pos[1], plank * 0.7, 0, tau)\n                        g.lineWidth = 1\n                        g.strokeStyle = 'blue'\n                        g.stroke()\n                    } else if (m[3] == 'ack') {\n                        g.beginPath()\n                        g.arc(pos[0], pos[1], plank * 0.7, 0, tau)\n                        g.fillStyle = (m[4].seen == 'local') ? 'lightblue' : 'blue'\n                        g.fill()\n                        \n                        \n                        var my_text = m[4].version\n                        draw_text(c, g, my_text, pos[0], pos[1], (m[4][2] == 'local') ? 
'blue' : 'white', 'center', 'middle')\n                        \n                        g.beginPath()\n                        g.arc(pos[0], pos[1], plank * 0.7, 0, tau)\n                        g.lineWidth = 1\n                        g.strokeStyle = 'blue'\n                        g.stroke()\n                    } else if (m[3] == 'fissure') {\n                        var fis = m[4].fissure\n                        \n                        var rand = Math.create_rand(fis.conn)\n                        var color = '#' + rand().toString(16).slice(2, 8)\n                        var rr = 10 * (1 + rand())\n                        \n                        \n                        \n                        \n                        g.beginPath()\n                        g.arc(pos[0], pos[1], plank * 0.7, 0, tau)\n                        g.fillStyle = 'black'\n                        g.fill()\n                        \n                        g.beginPath()\n                        if (fis.a < fis.b) {\n                            g.arc(pos[0], pos[1], rr, tau/4, tau*3/4)\n                        } else {\n                            g.arc(pos[0], pos[1], rr, tau*3/4, tau/4)\n                        }\n                        g.strokeStyle = color\n                        g.lineWidth = 2\n                        g.stroke()\n                        \n                        \n                        \n                    } else {\n                        throw 'unknown message type: ' + m[3]\n                    }\n                }\n                \n                peers[i].incoming.forEach(m => func(i, ii, m, aa, a))\n                peers[ii].incoming.forEach(m => func(ii, i, m, a, aa))\n            }\n        }\n        \n        peers.forEach((p, i) => {\n            var a = tau / peers.length * i\n            g.beginPath()\n            g.fillStyle = p.incoming.length > 0 ? 
'blue' : 'green'\n            var pos = [\n                x + w/2 + Math.cos(a)*r,\n                y + h/2 + Math.sin(a)*r\n            ]\n            g.arc(pos[0], pos[1], w/30, 0, tau)\n            g.fill()\n        })\n    }\n\n    function draw_fissure_dag(c, g, frames, fi, pi, x, y, w, h, r) {\n        var peers = frames[fi].peers\n        var peer = peers[pi].resources.my_key\n        if (!peer) return\n        \n        var fs = {}\n        Object.values(peer.fissures).forEach(f => {\n            var ff = fs[f.conn]\n            if (!ff) {\n                var rand = Math.create_rand(f.conn)\n                ff = fs[f.conn] = {\n                    id: f.conn,\n                    color: '#' + rand().toString(16).slice(2, 8),\n                    radius: r * (1 + rand()),\n                    parents: {}\n                }\n            }\n            if (f.a < f.b) ff.has_side_a = true\n            if (f.b < f.a) ff.has_side_b = true\n            \n            Object.keys(f.parents).forEach(p => {\n                \n                // work here\n                if (!peer.fissures[p]) {\n                    //debugger\n                    \n                    ff.has_issue = true\n                    \n                    return\n                }\n                \n                ff.parents[peer.fissures[p].conn] = true\n            })\n        })\n        \n        function get_layer(k) {\n            if (fs[k].layer) return fs[k].layer\n            return fs[k].layer = Object.keys(fs[k].parents).reduce((x, p) => {\n                return Math.max(x, get_layer(p) + 1)\n            }, 0)\n        }\n        Object.keys(fs).forEach(get_layer)\n        \n        var layer_members = {}\n        var num_layers = 0\n        Object.values(fs).forEach(f => {\n            layer_members[f.layer] = layer_members[f.layer] || []\n            layer_members[f.layer].push(f.id)\n            \n            if (f.layer >= num_layers) num_layers = f.layer + 1\n        })\n  
      \n        Object.values(layer_members).forEach(layer => {\n            layer.sort().forEach((k, i) => {\n                fs[k].layer_i = i\n            })\n        })\n\n        function get_node_pos(f) {\n            var layer_count = layer_members[f.layer].length\n            return [\n                lerp(0, x + r, layer_count, x + w - r, f.layer_i + 0.5),\n                y + r + (f.layer * r*4)\n            ]\n        }\n\n        Object.values(fs).forEach(f => {\n            var a = get_node_pos(f)\n            g.beginPath()\n            Object.keys(f.parents).map(x => fs[x]).forEach(p => {\n                var b = get_node_pos(p)\n                g.moveTo(a[0], a[1])\n                g.lineTo(b[0], b[1])\n            })\n            g.lineWidth = 3\n            g.strokeStyle = 'lightblue'\n            g.stroke()\n        })\n        \n        Object.values(fs).forEach(f => {\n            var node_pos = get_node_pos(f)\n            \n            var rand = Math.create_rand(f.id)\n            var color = '#' + rand().toString(16).slice(2, 8)\n            var rr = r * (1 + rand())\n            \n            g.beginPath()\n            g.arc(node_pos[0], node_pos[1], rr, 0, tau)\n            g.fillStyle = f.has_issue ? 
'red' : 'white'\n            g.fill()\n            \n            g.beginPath()\n            if (f.has_side_a) {\n                g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4)\n            }\n            if (f.has_side_b) {\n                g.arc(node_pos[0], node_pos[1], rr, tau*3/4, tau/4)\n            }\n            g.strokeStyle = color\n            g.lineWidth = 2\n            g.stroke()\n        })\n    }\n\n    function draw_time_dag(c, g, frames, fi, pi, x, y, w, h, r) {\n        var peers = frames[fi].peers\n        var resource = peers[pi].resources.my_key\n        if (!resource) return\n        var s9 = resource.mergeable\n        \n        g.lineWidth = 3\n        \n        var vs = {}\n        function get_layer(v) {\n            if (!vs[v]) vs[v] = {vid: v}\n            if (vs[v].layer) return vs[v].layer\n            return vs[v].layer = Object.keys(resource.time_dag[v]).reduce((x, p) => {\n                return Math.max(x, get_layer(p) + 1)\n            }, 0)\n        }\n        Object.keys(resource.time_dag).forEach(get_layer)\n        \n        var layer_members = {}\n        var num_layers = 0\n        Object.values(vs).forEach(v => {\n            layer_members[v.layer] = layer_members[v.layer] || []\n            layer_members[v.layer].push(v.vid)\n            \n            if (v.layer >= num_layers) num_layers = v.layer + 1\n        })\n        \n        Object.values(layer_members).forEach(layer => {\n            layer.sort().forEach((v, i) => {\n                vs[v].layer_i = i\n            })\n        })\n\n        function get_node_pos(v) {\n            var layer_count = layer_members[v.layer].length\n            return [\n                lerp(0, x + r, layer_count + 1, x + w - r, v.layer_i + 1),\n                y + r + (v.layer * r*3)\n            ]\n        }\n\n        Object.entries(vs).forEach(e => {\n            var a_pos = get_node_pos(e[1])\n            g.beginPath()\n            Object.keys(resource.time_dag[e[0]]).forEach(p 
=> {\n                g.moveTo(a_pos[0], a_pos[1])\n                \n                var b_pos = get_node_pos(vs[p])\n                g.lineTo(b_pos[0], b_pos[1])\n            })\n            g.strokeStyle = 'lightblue'\n            g.stroke()\n        })\n        \n        var fully_acked = {}\n        function mark_fully_acked_rec(v) {\n            if (!fully_acked[v]) {\n                fully_acked[v] = true\n                Object.keys(resource.time_dag[v]).forEach(mark_fully_acked_rec)\n            }\n        }\n        Object.keys(resource.acked_boundary).forEach(mark_fully_acked_rec)\n        \n        Object.entries(vs).forEach(e => {\n            var node_pos = get_node_pos(e[1])\n            \n            g.beginPath()\n            g.arc(node_pos[0], node_pos[1], r, 0, tau)\n            g.fillStyle = 'white'\n            g.fill()\n            \n            if (resource.acks_in_process[e[0]]) {\n                var current_count = Math.max(0, resource.acks_in_process[e[0]].count)\n                var max_count = 0\n                var search_i = fi\n                try {\n                    let x = null\n                    while (x = frames[search_i].peers[pi].resources.my_key.acks_in_process[e[0]]) {\n                        max_count = x.count\n                        search_i--\n                    }\n                } catch (e) {}\n                \n                var percent_done = (max_count - current_count) / max_count\n                if (percent_done > 0) {\n                    g.beginPath()\n                    g.arc(node_pos[0], node_pos[1], r, 0, tau/2, true)\n                    if (percent_done == 1) {\n                        g.arc(node_pos[0], node_pos[1], r, tau/2, 0, true)\n                    } else if (percent_done < 0.5) {\n                        var x = lerp(0, r, 0.5, 0, percent_done)\n                        var C = (r*r - x*x) / (2*x)\n                        var angle = Math.atan2(r, C)\n                        
g.arc(node_pos[0], node_pos[1] + C, C + x, tau*3/4 - angle, tau*3/4 + angle)\n                    } else if (percent_done > 0.5) {\n                        var x = lerp(0.5, 0, 1, r, percent_done)\n                        var C = (r*r - x*x) / (2*x)\n                        var angle = Math.atan2(r, C)\n                        g.arc(node_pos[0], node_pos[1] - C, C + x, tau/4 - angle, tau/4 + angle)\n                    } else {\n                        g.arc(node_pos[0], node_pos[1] + C, C + x, 0, tau)\n                    }\n                    g.fillStyle = 'lightblue'\n                    g.fill()\n                }\n            }\n            \n            g.beginPath()\n            g.arc(node_pos[0], node_pos[1], r, 0, tau)\n            if (fully_acked[e[0]]) {\n                g.fillStyle = 'blue'\n                g.fill()\n            } else {\n                g.strokeStyle = 'blue'\n                g.stroke()\n            }\n            \n            draw_text(c, g, e[0].slice(0, 3), node_pos[0] + r, node_pos[1] + r, 'grey', 'left', 'top')\n        })\n        \n        Object.keys(resource.unack_boundary).forEach(v => {\n            g.beginPath()\n            g.fillStyle = 'white'\n            var node_pos = get_node_pos(vs[v])\n            g.arc(node_pos[0], node_pos[1], r * 0.5, 0, Math.PI*2)\n            g.fill()\n        })\n        \n        Object.values(resource.fissures).forEach(f => {\n            Object.keys(f.versions).forEach(v => {\n                if (!resource.time_dag[v]) return\n                g.beginPath()\n                \n                var rand = Math.create_rand(f.conn)\n                g.strokeStyle = '#' + rand().toString(16).slice(2, 8)\n                \n                var node_pos = get_node_pos(vs[v])\n                //var rr = r * 1.45\n                var rr = r * (1 + rand())\n                \n                g.lineWidth = 2\n                if (f.a < f.b) {\n                    \n\n\n                    // work here\n  
                  g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4)\n                    \n                    \n                    \n                    // g.moveTo(node_pos[0] - rr, node_pos[1] - rr)\n                    // g.lineTo(node_pos[0] + rr, node_pos[1] - rr)\n                    // g.lineTo(node_pos[0] + rr, node_pos[1] + rr)\n                    // g.lineTo(node_pos[0] - rr, node_pos[1] + rr)\n                } else {\n                    \n                    g.arc(node_pos[0], node_pos[1], rr, tau/4, tau*3/4, true)\n                    \n                    \n                    // var rrr = Math.sqrt(2) * rr\n                    // g.moveTo(node_pos[0] - rrr, node_pos[1])\n                    // g.lineTo(node_pos[0], node_pos[1] - rrr)\n                    // g.lineTo(node_pos[0] + rrr, node_pos[1])\n                    // g.lineTo(node_pos[0], node_pos[1] + rrr)\n                    // g.closePath()\n                }\n                g.stroke()\n            })\n        })\n        \n    }\n\n    function draw_space_dag(c, g, S, x, y) {\n        function helper(node, y, px, py) {\n            g.beginPath()\n            g.moveTo(x, y)\n            g.lineTo(px, py)\n            g.lineWidth = 1\n            g.strokeStyle = 'lightblue'\n            g.stroke()\n\n            var begin_x\n            var end_x\n            \n            draw_text(c, g, node.vid ? node.vid.slice(0, 3) : '', x, y + 25, 'grey', 'left', 'middle')\n            \n            var my_text = node.elems + (node.end_cap ? '*' : '')\n            \n            draw_text(c, g, my_text, x, y, Object.keys(node.deleted_by).length > 0 ? 
'red' : 'blue', 'left', 'middle', '20px Arial')\n            \n            var width = g.measureText(my_text).width\n            x += width\n\n            var px = x\n            x += 10\n            for (var n of node.nexts) helper(n, y + 40, px, y)\n            if (node.next) helper(node.next, y, px, y)\n        }\n        if (typeof(S) == 'string') helper(sync9_create_space_dag_node('lit', S))\n        else helper(S, y, x, y)\n    }\n\n    function lerp(t0, v0, t1, v1, t) {\n        function inner_lerp(t0, v0, t1, v1, t) {\n            return (t - t0) * (v1 - v0) / (t1 - t0) + v0\n        }\n        if (typeof(v0) == 'object') {\n            return v0.map((x, i) => inner_lerp(t0, x, t1, v1[i], t))\n        } else return inner_lerp(t0, v0, t1, v1, t)\n    }\n\n    function rot(a, r) {\n        return [\n            a[0] * Math.cos(r) + a[1] * -Math.sin(r),\n            a[0] * Math.sin(r) + a[1] * Math.cos(r)]\n    }\n\n    var mul   = (a, s) => a.map(a => a * s)\n    var sum   = (a)    => a.reduce((a, b) => a + b, 0)\n    var lenSq = (a)    => sum(a.map(x => x*x))\n    var len   = (a)    => Math.sqrt(lenSq(a))\n    var norm  = (a)    => mul(a, 1 / len(a))\n    var add   = (a, b) => a.map((a, i) => a + b[i])\n    var sub   = (a, b) => a.map((a, i) => a - b[i])\n\n    return vis\n}\n"
  },
  {
    "path": "sync9/sync9.js",
    "content": "// Adapted from https://github.com/dglittle/cdn/blob/gh-pages/sync9_047.html\n\nmodule.exports = require.sync9 = function create (resource) {\n    if (!resource.space_dag) resource.space_dag = null\n    return {\n        read (version) {\n            return read(resource, version)\n        },\n\n        add_version (version, parents, patches, hint) {\n            return add_version(resource, version, parents, patches,\n                               hint && hint.sort_keys)\n        },\n\n        generate_braid (versions) {\n            var ancestors = (versions && Object.keys(versions).length\n                             ? resource.ancestors(versions, true)\n                             : {})\n            var versions = generate_braid(resource, x => ancestors[x])\n\n            // Hey Greg: Why are we cloning versions here?  -Mike\n            versions = JSON.parse(JSON.stringify(versions))\n\n            versions.forEach(x => {\n                // we want to put some of this stuff in a \"hint\" field,\n                // as per the protocol\n                if (x.sort_keys) {\n                    x.hint = {sort_keys: x.sort_keys}\n                    delete x.sort_keys\n                }\n            })\n            return versions\n        },\n\n        prune (bubbles) {\n            return prune(resource, bubbles)\n        }\n    }\n}\n\nfunction generate_braid(resource, is_anc) {\n    if (Object.keys(resource.time_dag).length === 0)\n        return []\n\n    return Object.entries(resource.version_cache).filter(\n               x => !is_anc(x[0])\n           ).map(\n               ([version, set_message]) => {\n                   return resource.version_cache[version]\n                       = set_message || generate_set_message(version)\n           })\n\n    function generate_set_message(version) {\n        if (!Object.keys(resource.time_dag[version]).length) {\n            return {\n                version,\n                parents: {},\n      
          patches: [` = ${JSON.stringify(read_raw(resource, v => v == version))}`]\n            }\n        }\n    \n        var is_lit = x => !x || typeof(x) !== 'object' || x.t === 'lit'\n        var get_lit = x => (x && typeof(x) === 'object' && x.t === 'lit') ? x.S : x\n    \n        var ancs = resource.ancestors({[version]: true})\n        delete ancs[version]\n        var is_anc = x => ancs[x]\n        var path = []\n        var patches = []\n        var sort_keys = {}\n        recurse(resource.space_dag)\n        function recurse(x) {\n            if (is_lit(x)) {\n            } else if (x.t === 'val') {\n                space_dag_generate_braid(x.S, resource, version, is_anc).forEach(s => {\n                    if (s[2].length) {\n                        patches.push(`${path.join('')} = ${JSON.stringify(s[2][0])}`)\n                        if (s[3]) sort_keys[patches.length - 1] = s[3]\n                    }\n                })\n                traverse_space_dag(x.S, is_anc, node => {\n                    node.elems.forEach(recurse)\n                })\n            } else if (x.t === 'arr') {\n                space_dag_generate_braid(x.S, resource, version, is_anc).forEach(s => {\n                    patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`)\n                    if (s[3]) sort_keys[patches.length - 1] = s[3]\n                })\n                var i = 0\n                traverse_space_dag(x.S, is_anc, node => {\n                    node.elems.forEach(e => {\n                        path.push(`[${i++}]`)\n                        recurse(e)\n                        path.pop()\n                    })\n                })\n            } else if (x.t === 'obj') {\n                Object.entries(x.S).forEach(e => {\n                    path.push('[' + JSON.stringify(e[0]) + ']')\n                    recurse(e[1])\n                    path.pop()\n                })\n            } else if (x.t === 'str') {\n                
space_dag_generate_braid(x.S, resource, version, is_anc).forEach(s => {\n                    patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`)\n                    if (s[3]) sort_keys[patches.length - 1] = s[3]\n                })\n            }\n        }\n    \n        return {\n            version,\n            parents: Object.assign({}, resource.time_dag[version]),\n            patches,\n            sort_keys\n        }\n    }\n}\n\nfunction space_dag_generate_braid(S, resource, version, is_anc) {\n    var splices = []\n\n    function add_ins(offset, ins, sort_key, end_cap) {\n        if (typeof(ins) !== 'string')\n            ins = ins.map(x => read_raw(x, () => false))\n        if (splices.length > 0) {\n            var prev = splices[splices.length - 1]\n            if (prev[0] + prev[1] === offset && !end_cap && (prev[4] === 'i' || (prev[4] === 'r' && prev[1] === 0))) {\n                prev[2] = prev[2].concat(ins)\n                return\n            }\n        }\n        splices.push([offset, 0, ins, sort_key, end_cap ? 
'r' : 'i'])\n    }\n\n    function add_del(offset, del, ins) {\n        if (splices.length > 0) {\n            var prev = splices[splices.length - 1]\n            if (prev[0] + prev[1] === offset && prev[4] !== 'i') {\n                prev[1] += del\n                return\n            }\n        }\n        splices.push([offset, del, ins, null, 'd'])\n    }\n    \n    var offset = 0\n    function helper(node, _version, end_cap) {\n        if (_version === version) {\n            add_ins(offset, node.elems.slice(0), node.sort_key, end_cap)\n        } else if (node.deleted_by[version] && node.elems.length > 0) {\n            add_del(offset, node.elems.length, node.elems.slice(0, 0))\n        }\n        \n        if ((!_version || is_anc(_version)) && !Object.keys(node.deleted_by).some(is_anc)) {\n            offset += node.elems.length\n        }\n        \n        node.nexts.forEach(next => helper(next, next.version, node.end_cap))\n        if (node.next) helper(node.next, _version)\n    }\n    helper(S, null)\n    splices.forEach(s => {\n        // if we have replaces with 0 deletes,\n        // make them have at least 1 delete..\n        // this can happen when there are multiple replaces of the same text,\n        // and our code above will associate those deletes with only one of them\n        if (s[4] === 'r' && s[1] === 0) s[1] = 1\n    })\n    return splices\n}\n\n\n\nfunction prune(resource, to_bubble) {\n    assert(resource.time_dag, 'No time dag on ' + JSON.stringify(resource))\n\n    var is_lit = x => !x || typeof(x) != 'object' || x.t == 'lit'\n    var get_lit = x => (x && typeof(x) == 'object' && x.t == 'lit') ? 
x.S : x\n\n    var seen_annotations = {}\n    see_annotations(resource.space_dag)\n    function see_annotations(x, is_lit_override) {\n        if (is_lit_override || is_lit(x)) {\n            if (!is_lit_override && x && typeof(x) == 'object' && x.t == 'lit') x = x.S\n            if (Array.isArray(x)) for (y of x) see_annotations(y, true)\n            else if (x && typeof(x) == 'object') {\n                if (x.type == 'location') seen_annotations[x.id] = true\n                else for (y of Object.values(x)) see_annotations(y, true)\n            }\n        } else if (x.t == 'val') {\n            traverse_space_dag(x.S, () => true, node => {\n                node.elems.forEach(x => see_annotations(x))\n            }, true)\n        } else if (x.t == 'arr') {\n            traverse_space_dag(x.S, () => true, node => {\n                node.elems.forEach(x => see_annotations(x))\n            }, true)\n        } else if (x.t == 'obj') {\n            Object.values(x.S).forEach(x => see_annotations(x))\n        }\n    }\n\n    function recurse(x) {\n        if (is_lit(x)) return x\n        if (x.t == 'val') {\n            space_dag_prune(x.S, to_bubble)\n            traverse_space_dag(x.S, () => true, node => {\n                node.elems = node.elems.slice(0, 1).map(recurse)\n            }, true)\n            if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.length == 1 && is_lit(x.S.elems[0])) return x.S.elems[0]\n            return x\n        }\n        if (x.t == 'arr') {\n            space_dag_prune(x.S, to_bubble, seen_annotations)\n            traverse_space_dag(x.S, () => true, node => {\n                node.elems = node.elems.map(recurse)\n            }, true)\n            if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.every(is_lit) && !Object.keys(x.S.deleted_by).length && !x.S.annotations) return {t: 'lit', S: x.S.elems.map(get_lit)}\n            return x\n        }\n        if (x.t == 'obj') {\n            Object.entries(x.S).forEach(e => {\n         
       var y = x.S[e[0]] = recurse(e[1])\n                if (is_lit(y) && y && typeof(y) == 'object' && y.S.type == 'deleted')\n                    delete x.S[e[0]]\n            })\n            if (Object.values(x.S).every(is_lit)) {\n                var o = {}\n                Object.entries(x.S).forEach(e => o[e[0]] = get_lit(e[1]))\n                return {t: 'lit', S: o}\n            }\n            return x\n        }\n        if (x.t == 'str') {\n            space_dag_prune(x.S, to_bubble, seen_annotations)\n            if (x.S.nexts.length == 0 && !x.S.next && !Object.keys(x.S.deleted_by).length && !x.S.annotations) return x.S.elems\n            return x\n        }\n    }\n    resource.space_dag = recurse(resource.space_dag)\n\n    Object.entries(to_bubble).forEach(([version, bubble]) => {\n        if (version === bubble[1])\n            resource.time_dag[bubble[0]] = resource.time_dag[bubble[1]]\n        if (version !== bubble[0]) {\n            delete resource.time_dag[version]\n            delete resource.version_cache[version]\n        } else resource.version_cache[version] = null\n    })\n\n    // Now we check to see if we can collapse the spacedag down to a literal.\n    //\n    // Todo: Should this code be looking so intimately at antimatter data,\n    // like the acked_boundary and fissures?  Shouldn't that part be computed\n    // in the antimatter section?  
Maybe it should just pass the result of\n    // this computation into the prune() function as a parameter?\n    //\n    // (This code also assumes there is a God (a single first version adder))\n    var leaves = Object.keys(resource.current_version)\n    var acked_boundary = Object.keys(resource.acked_boundary)\n    var fiss = Object.keys(resource.fissures)\n    if (leaves.length === 1 && acked_boundary.length === 1\n        && leaves[0] === acked_boundary[0] && fiss.length === 0\n        && !Object.keys(seen_annotations).length) {\n\n        resource.time_dag = { [leaves[0]]: {} }\n        var val = read_raw(resource)\n        resource.space_dag = (val && typeof(val) === 'object'\n                              ? {t: 'lit', S: val}\n                              : val)\n    }\n}\n\nfunction space_dag_prune(S, to_bubble, seen_annotations) {\n\n    traverse_space_dag(S, () => true, node => {\n        if (to_bubble[node.version] && to_bubble[node.version][0] != node.version) {\n            if (!node.sort_key) node.sort_key = node.version\n            node.version = to_bubble[node.version][0]\n        }\n\n        for (var x of Object.keys(node.deleted_by)) {\n            if (to_bubble[x]) {\n                delete node.deleted_by[x]\n                node.deleted_by[to_bubble[x][0]] = true\n            }\n        }\n\n        if (node.annotations) {\n            for (k of Object.keys(node.annotations))\n                if (!seen_annotations[k]) delete node.annotations[k]\n            if (!Object.keys(node.annotations).length) delete node.annotations\n        }\n    }, true)\n\n    function set_nnnext(node, next) {\n        while (node.next) node = node.next\n        node.next = next\n    }\n\n    do_line(S, S.version)\n    function do_line(node, version) {\n        var prev = null\n        while (node) {\n            if (node.nexts[0] && node.nexts[0].version == version) {\n                for (let i = 0; i < node.nexts.length; i++) {\n                    delete 
node.nexts[i].version\n                    delete node.nexts[i].sort_key\n                    set_nnnext(node.nexts[i], i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next)\n                }\n                node.next = node.nexts[0]\n                node.nexts = []\n            }\n\n            if (node.deleted_by[version]) {\n                if (node.annotations) Object.keys(node.annotations).forEach(k => node.annotations[k] = 0)\n                node.elems = node.elems.slice(0, 0)\n                node.deleted_by = {}\n                if (prev) { node = prev; continue }\n            }\n\n            var next = node.next\n\n            if (!node.nexts.length && next && (!node.elems.length || !next.elems.length || (Object.keys(node.deleted_by).every(x => next.deleted_by[x]) && Object.keys(next.deleted_by).every(x => node.deleted_by[x])))) {\n                if (next.annotations) {\n                    node.annotations = node.annotations || {}\n                    Object.entries(next.annotations).forEach(e => {\n                        node.annotations[e[0]] = node.elems.length + e[1]\n                    })\n                }\n                if (!node.elems.length) node.deleted_by = next.deleted_by\n                node.elems = node.elems.concat(next.elems)\n                node.end_cap = next.end_cap\n                node.nexts = next.nexts\n                node.next = next.next\n                continue\n            }\n\n            for (let n of node.nexts) do_line(n, n.version)\n\n            prev = node\n            node = next\n        }\n    }\n}\n\nfunction add_version(resource, version, parents, patches, sort_keys, is_anc) {\n    let make_lit = x => (x && typeof(x) == 'object') ? 
{t: 'lit', S: x} : x\n    \n    if (!sort_keys) sort_keys = {}\n    \n    if (!Object.keys(parents).length) {\n        var parse = parse_patch(patches[0])\n        resource.space_dag = make_lit(parse.value)\n        parse.annotations && create_annotations(parse.annotations)\n        return\n    }\n    \n    if (!is_anc) {\n        if (parents == resource.current_version)\n            is_anc = (_version) => _version != version\n        else {\n            var ancs = resource.ancestors(parents)\n            is_anc = _version => ancs[_version]\n        }\n    }\n\n    var annotations = {}\n    \n    patches.forEach((patch, i) => {\n        var sort_key = sort_keys[i]\n        var parse = parse_patch(patch)\n        Object.assign(annotations, parse.annotations)\n        var cur = resolve_path(parse)\n        if (!parse.slice) {\n            if (cur.t != 'val') throw 'bad'\n            var len = space_dag_length(cur.S, is_anc)\n            space_dag_add_version(cur.S, version, [[0, len, [parse.delete ? 
make_lit({type: 'deleted'}) : make_lit(parse.value)]]], sort_key, is_anc)\n        } else {\n            if (typeof parse.value === 'string' && cur.t !== 'str')\n                throw `Cannot splice string ${JSON.stringify(parse.value)} into non-string`\n            if (parse.value instanceof Array && cur.t !== 'arr')\n                throw `Cannot splice array ${JSON.stringify(parse.value)} into non-array`\n            if (parse.value instanceof Array)\n                parse.value = parse.value.map(x => make_lit(x))\n\n            var r0 = parse.slice[0]\n            var r1 = parse.slice[1]\n            if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {\n                let len = space_dag_length(cur.S, is_anc)\n                if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0\n                if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1\n            }\n\n            space_dag_add_version(\n                cur.S, version, [[r0, r1 - r0, parse.value]], sort_key, is_anc\n            )\n        }\n    })\n\n    create_annotations(annotations)\n    function create_annotations(annotations) {\n        var prev_is_anc = is_anc\n        is_anc = v => prev_is_anc(v) || v == version\n        Object.entries(annotations).forEach(e => {\n            e[1].slice = [0, 0]\n            var cur = resolve_path(e[1])\n            function helper(node, offset) {\n                if (offset <= e[1].pos && e[1].pos <= offset + node.elems.length) {\n                    node.annotations = node.annotations || {}\n                    node.annotations[e[0]] = e[1].pos - offset\n                    return false\n                }\n            }\n            if (e[1].pos == 0) helper(cur.S, 0)\n            else traverse_space_dag(cur.S, is_anc, helper)\n        })\n    }\n\n    function resolve_path(parse) {\n        var cur = resource.space_dag\n        if (!cur || typeof(cur) != 'object' || cur.t == 'lit')\n            cur = resource.space_dag = {t: 'val', S: 
create_space_dag_node(null, [cur])}\n        var prev_S = null\n        var prev_i = 0\n        for (var i=0; i<parse.path.length; i++) {\n            var key = parse.path[i]\n            if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n            if (cur.t == 'lit') {\n                var new_cur = {}\n                if (cur.S instanceof Array) {\n                    new_cur.t = 'arr'\n                    new_cur.S = create_space_dag_node(null, cur.S.map(x => make_lit(x)))\n                } else {\n                    if (typeof(cur.S) != 'object') throw 'bad'\n                    new_cur.t = 'obj'\n                    new_cur.S = {}\n                    Object.entries(cur.S).forEach(e => new_cur.S[e[0]] = make_lit(e[1]))\n                }\n                cur = new_cur\n                space_dag_set(prev_S, prev_i, cur, is_anc)\n            }\n            if (cur.t == 'obj') {\n                let x = cur.S[key]\n                if (!x || typeof(x) != 'object' || x.t == 'lit')\n                    x = cur.S[key] = {t: 'val', S: create_space_dag_node(null, [x == undefined ? {t: 'lit', S: {type: 'deleted'}} : x])}\n                cur = x\n            } else if (i == parse.path.length - 1 && !parse.slice) {\n                parse.slice = [key, key + 1]\n                parse.value = (cur.t == 'str') ? 
parse.value : [parse.value]\n            } else if (cur.t == 'arr') {\n                cur = space_dag_get(prev_S = cur.S, prev_i = key, is_anc)\n            } else throw 'bad'\n        }\n        if (parse.slice) {\n            if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n            if (typeof(cur) == 'string') {\n                cur = {t: 'str', S: create_space_dag_node(null, cur)}\n                space_dag_set(prev_S, prev_i, cur, is_anc)\n            } else if (cur.t == 'lit') {\n                if (!(cur.S instanceof Array)) throw 'bad'\n                cur = {t: 'arr', S: create_space_dag_node(null, cur.S.map(x => make_lit(x)))}\n                space_dag_set(prev_S, prev_i, cur, is_anc)\n            }\n        }\n        return cur\n    }\n}\n\nfunction read(x, is_anc) {\n    if (!is_anc) is_anc = () => true\n    var annotations = {}\n    return finalize(read_raw(x, is_anc, annotations))\n    function finalize(x) {\n        if (Array.isArray(x))\n            for (var i = 0; i < x.length; i++) x[i] = finalize(x[i])\n        else if (x && typeof(x) == 'object') {\n            if (x.type == 'location')\n                return annotations[x.id]\n            else {\n                var y = {}\n                Object.entries(x).forEach(e => {\n                    if (e[1] && typeof(e[1]) == 'object' && e[1].type == 'deleted') return\n                    var key = e[0].match(/^_+type$/) ? 
e[0].slice(1) : e[0]\n                    y[key] = finalize(e[1])\n                })\n                return y\n            }\n        }\n        return x\n    }\n}\n\nfunction read_raw(x, is_anc, annotations) {\n    if (!is_anc) is_anc = () => true\n    else if (typeof(is_anc) == 'string') {\n        var ancs = x.ancestors({[is_anc]: true})\n        is_anc = v => ancs[v]\n    } else if (typeof(is_anc) == 'object') {\n        var ancs = x.ancestors(is_anc)\n        is_anc = v => ancs[v]\n    }\n\n    return finalize(rec_read(x))\n    function rec_read(x) {\n        if (x && typeof(x) == 'object') {\n            if (!x.t) return rec_read(x.space_dag)\n            if (x.t == 'lit') return JSON.parse(JSON.stringify(x.S))\n            if (x.t == 'val') return rec_read(space_dag_get(x.S, 0, is_anc))\n            if (x.t == 'obj') {\n                var o = {}\n                Object.entries(x.S).forEach(([k, v]) => o[k] = rec_read(v))\n                return o\n            }\n            if (x.t == 'arr') {\n                var a = []\n                traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n                    if (annotations && node.annotations) Object.entries(node.annotations).forEach(e => {\n                        annotations[e[0]] = a.length + (deleted ? 0 : e[1])\n                    })\n                    if (!deleted) {\n                        node.elems.forEach((e) => {\n                            a.push(rec_read(e))\n                        })\n                    }\n                }, true)\n                return a\n            }\n            if (x.t == 'str') {\n                var s = []\n                var len = 0\n                traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n                    if (annotations && node.annotations) Object.entries(node.annotations).forEach(e => {\n                        annotations[e[0]] = len + (deleted ? 
0 : e[1])\n                    })\n                    if (!deleted) {\n                        s.push(node.elems)\n                        len += node.elems.length\n                    }\n                }, true)\n                return s.join('')\n            }\n            throw 'bad'\n        } return x\n    }\n    function finalize(x) {\n        if (Array.isArray(x)) x.forEach(x => finalize(x))\n        else if (x && typeof(x) == 'object') {\n            if (!annotations && x.type == 'location') delete x.id\n            else Object.values(x).forEach(x => finalize(x))\n        }\n        return x\n    }\n}\n\nfunction create_space_dag_node(version, elems, end_cap, sort_key) {\n    return {\n        version : version,\n        sort_key : sort_key,\n        elems : elems,\n        deleted_by : {},\n        end_cap : end_cap,\n        nexts : [],\n        next : null\n    }\n}\n\nfunction space_dag_get(S, i, is_anc) {\n    var ret = null\n    var offset = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            ret = node.elems[i - offset]\n            return false\n        }\n        offset += node.elems.length\n    })\n    return ret\n}\n\nfunction space_dag_set(S, i, v, is_anc) {\n    var offset = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            node.elems[i - offset] = v\n            return false\n        }\n        offset += node.elems.length\n    })\n}\n\nfunction space_dag_length(S, is_anc) {\n    var count = 0\n    traverse_space_dag(S, is_anc ? 
is_anc : () => true, node => {\n        count += node.elems.length\n    })\n    return count\n}\n\nfunction space_dag_break_node(node, x, end_cap, new_next) {\n    var tail = create_space_dag_node(null, node.elems.slice(x), node.end_cap)\n    Object.assign(tail.deleted_by, node.deleted_by)\n    tail.nexts = node.nexts\n    tail.next = node.next\n    \n    node.elems = node.elems.slice(0, x)\n    node.end_cap = end_cap\n    node.nexts = new_next ? [new_next] : []\n    node.next = tail\n\n    var annotations = node.annotations || {}\n    delete node.annotations\n    Object.entries(annotations).forEach(e => {\n        if (e[1] <= x) {\n            node.annotations = node.annotations || {}\n            node.annotations[e[0]] = e[1]\n        } else {\n            tail.annotations = tail.annotations || {}\n            tail.annotations[e[0]] = e[1] - x\n        }\n    })\n    \n    return tail\n}\n\nfunction space_dag_add_version(S, version, splices, sort_key, is_anc) {\n    \n    function add_to_nexts(nexts, to) {\n        var i = binarySearch(nexts, function (x) {\n            if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1\n            if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1\n            return 0\n        })\n        nexts.splice(i, 0, to)\n    }\n    \n    var si = 0\n    var delete_up_to = 0\n    \n    // `node` is a patch\n    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n        var s = splices[si]\n        if (!s) return false\n        \n        if (deleted) {\n            if (s[1] == 0 && s[0] == offset) {\n                if (node.elems.length == 0 && !node.end_cap && has_nexts) return\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n                if (node.elems.length == 0 && !node.end_cap)\n                    add_to_nexts(node.nexts, new_node)\n                else\n                    space_dag_break_node(node, 0, undefined, new_node)\n  
              si++\n            }\n            return            \n        }\n        \n        if (s[1] == 0) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d > 0) return\n            if (d == 0 && !node.end_cap && has_nexts) return\n            var new_node = create_space_dag_node(version, s[2], null, sort_key)\n            if (d == 0 && !node.end_cap) {\n                add_to_nexts(node.nexts, new_node)\n            } else {\n                space_dag_break_node(node, s[0] - offset, undefined, new_node)\n            }\n            si++\n            return\n        }\n        \n        if (delete_up_to <= offset) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d >= 0) return\n            delete_up_to = s[0] + s[1]\n            \n            if (s[2]) {\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n                if (s[0] == offset && prev && prev.end_cap) {\n                    add_to_nexts(prev.nexts, new_node)\n                } else {\n                    space_dag_break_node(node, s[0] - offset, true, new_node)\n                    return\n                }\n            } else {\n                if (s[0] == offset) {\n                } else {\n                    space_dag_break_node(node, s[0] - offset)\n                    return\n                }\n            }\n        }\n        \n        if (delete_up_to > offset) {\n            if (delete_up_to <= offset + node.elems.length) {\n                if (delete_up_to < offset + node.elems.length) {\n                    space_dag_break_node(node, delete_up_to - offset)\n                }\n                si++\n            }\n            node.deleted_by[version] = true\n            return\n        }\n    }\n    \n    var f = is_anc\n    var exit_early = {}\n    var offset = 0\n    function traverse(node, prev, version) {\n        var has_nexts = node.nexts.find(next => f(next.version))\n        var deleted = 
Object.keys(node.deleted_by).some(version => f(version))\n        if (process_patch(node, offset, has_nexts, prev, version, deleted) == false)\n            throw exit_early\n        if (!deleted) {\n            offset += node.elems.length\n        }\n        for (var next of node.nexts)\n            if (f(next.version)) traverse(next, null, next.version)\n        if (node.next) traverse(node.next, node, version)\n    }\n    try {\n        if (!S) debugger\n        traverse(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n    \n}\n\nfunction traverse_space_dag(S, f, cb, view_deleted, tail_cb) {\n    var exit_early = {}\n    var offset = 0\n    function helper(node, prev, version) {\n        var has_nexts = node.nexts.find(next => f(next.version))\n        var deleted = Object.keys(node.deleted_by).some(version => f(version))\n        if (view_deleted || !deleted) {\n            if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n                throw exit_early\n            offset += node.elems.length\n        }\n        for (var next of node.nexts)\n            if (f(next.version)) helper(next, null, next.version)\n        if (node.next) helper(node.next, node, version)\n        else if (tail_cb) tail_cb(node)\n    }\n    try {\n        helper(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n}\n\nvar parse_patch = require('../util/utilities.js').parse_patch\n\n// modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\nfunction binarySearch(ar, compare_fn) {\n    var m = 0;\n    var n = ar.length - 1;\n    while (m <= n) {\n        var k = (n + m) >> 1;\n        var cmp = compare_fn(ar[k]);\n        if (cmp > 0) {\n            m = k + 1;\n        } else if(cmp < 0) {\n            n = k - 1;\n        } else {\n            return k;\n        }\n    }\n    return m;\n}\n"
  },
  {
    "path": "util/apply-patch.js",
    "content": "function apply_patch (obj, range, content) {\n\n    // Descend down a bunch of objects until we get to the final object\n    // The final object can be a slice\n    // Set the value in the final object\n\n    var path = range,\n        new_stuff = content\n\n    // We will break up the path into segments, like:\n    //\n    //   Path: \".foo.bar[3]\"\n    //\n    //   Segments:\n    //     - \".foo\"\n    //     - \".bar\"\n    //     - \"[3]\"\n\n    var path_segment = /^(\\.([^\\.\\[]+))|(\\[((-?\\d+):)?(-?\\d+)\\])/\n    var curr_obj = obj,\n        last_obj = null\n\n    // Then we'll iterate through each segment, and descend into the obj.\n    //\n    // When we reach the *last* segment, we set its value to `content`, and\n    // then we're done!\n\n    do {\n\n        // Grab the next segment from the path\n\n        var match = path_segment.exec(path),\n            subpath = match[0],\n            field = match[2],\n            slice_start = match[5],\n            slice_end = match[6]\n\n        slice_start = slice_start && de_neg(slice_start)\n        slice_end = slice_end && de_neg(slice_end)\n\n        // If this is not the last segment, then let's iterate one step deeper\n        // into the object until we find the thing we're supposed to replace.\n\n        if (path.length !== subpath.length) {\n            console.assert(!slice_start, 'No splices allowed in middle of path')\n            last_obj = curr_obj\n            last_field = field\n            curr_obj = curr_obj[field || slice_end]\n            path = path.substr(subpath.length)\n        }\n\n        // Otherwise, we made it!  
Let's replace the range with its new\n        // contents!\n\n        else {\n            // There are 4 things we can set the values of:\n\n            // Case 1: Object\n            if (field)\n                curr_obj[field] = new_stuff\n\n            // Case 2: Strings\n            else if (typeof curr_obj == 'string') {  // String\n                console.assert(typeof new_stuff == 'string')\n                if (!slice_start) {\n                    slice_start = slice_end;\n                    slice_end = slice_end+1\n                }\n                if (last_obj) {\n                    var s = last_obj[last_field]\n                    last_obj[last_field] = (s.slice(0, slice_start)\n                                            + new_stuff\n                                            + s.slice(slice_end))\n                } else\n                    return obj.slice(0, slice_start) + new_stuff + obj.slice(slice_end)\n            \n            }\n\n            // Then it's an Array!  We have two ways to set an Array:\n            else {\n                // Case 3: Array Splice (e.g. [3:9] = [1]\n                if (slice_start)\n                    [].splice.apply(curr_obj, [slice_start, slice_end-slice_start]\n                                    .concat(new_stuff))\n\n                // Case 4: Array Set (e.g. [3] = true\n                else {\n                    console.assert(slice_end >= 0, 'Index '+subpath+' is too small')\n                    console.assert(slice_end <= curr_obj.length - 1,\n                                   'Index '+subpath+' is too big')\n                    curr_obj[slice_end] = new_stuff\n                }\n            }\n\n            return obj\n        }\n\n    } while (true)\n\n    // This helper converts negative indices, like \"[-9]\" or \"[-0]\"\n    function de_neg (x) {\n        return x[0] === '-'\n            ? 
curr_obj.length - parseInt(x.substr(1))\n            : parseInt(x)\n    }\n}\n\n\nif (require.main === module) {\n    // Tests!\n    console.log('\\nTests:')\n    console.log(apply_patch({a: 'b'}, '.a', 'c'))\n    console.log(apply_patch([1,2,3], '[1]', 9))\n    console.log(apply_patch([1,2,3], '[1:-0]', [10,100]))\n    console.log(apply_patch([1,2,{a:'b'}], '[2].b', 9))\n    console.log(apply_patch([1,2,{a:'b'}], '[2].a', 99))\n\n    // Answer key\n    console.log(`\\nCorrect Answers:\n{ a: 'c' }\n[ 1, 9, 3 ]\n[ 1, 10, 100 ]\n[ 1, 2, { a: 'b', b: 9 } ]\n[ 1, 2, { a: 99 } ]\n`)\n}"
  },
  {
    "path": "util/braid-bundler.js",
    "content": "// Bundles up the client javascript file.\nvar files = [\n    'util/require.js',\n    'util/utilities.js',\n    'sync9/sync9.js',\n    'kernel/antimatter.js',\n    'kernel/errors.js',\n    'kernel/node.js',\n    'kernel/pipe.js',\n    'util/diff.js',\n    'kernel/store.js',\n    'kernel/websocket-client.js',\n    'kernel/http-client.js',\n    'braidify/braidify-client.js',\n    'kernel/leadertab-shell.js',\n]\n\nvar fs = require('fs')\n\n// Translate relative directories\nvar file_at = (f) => require('path').join(__dirname, '..', f)\n\n// Create builds/ directory if it doesn't exist\nif (!fs.existsSync(file_at('builds')))\n    fs.mkdirSync(file_at('builds'))\n\n// Write the bundle file\nfs.writeFileSync(\n    file_at('builds/braid-bundle.js'),\n    files.map(f => fs.readFileSync(file_at(f))).join('\\n')\n)\n"
  },
  {
    "path": "util/diff.js",
    "content": "\nfunction diff_convert_to_my_format(d, factor) {\n    if (factor === undefined) factor = 1\n    var x = []\n    var ii = 0\n    for (var i = 0; i < d.length; i++) {\n        var dd = d[i]\n        if (dd[0] == DIFF_EQUAL) {\n            ii += dd[1].length\n            continue\n        }\n        var xx = [ii, 0, '']\n        if (dd[0] == DIFF_INSERT * factor) {\n            xx[2] = dd[1]\n        } else if (dd[0] == DIFF_DELETE * factor) {\n            xx[1] = dd[1].length\n            ii += xx[1]\n        }\n        if (i + 1 < d.length) {\n            dd = d[i + 1]\n            if (dd[0] != DIFF_EQUAL) {\n                if (dd[0] == DIFF_INSERT * factor) {\n                    xx[2] = dd[1]\n                } else if (dd[0] == DIFF_DELETE * factor) {\n                    xx[1] = dd[1].length\n                    ii += xx[1]\n                }\n                i++\n            }\n        }\n        x.push(xx)\n    }\n    return x\n}\n\n/**\n * This library modifies the diff-patch-match library by Neil Fraser\n * by removing the patch and match functionality and certain advanced\n * options in the diff function. 
The original license is as follows:\n *\n * ===\n *\n * Diff Match and Patch\n *\n * Copyright 2006 Google Inc.\n * http://code.google.com/p/google-diff-match-patch/\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *   http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n/**\n * The data structure representing a diff is an array of tuples:\n * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']]\n * which means: delete 'Hello', add 'Goodbye' and keep ' world.'\n */\nvar DIFF_DELETE = -1;\nvar DIFF_INSERT = 1;\nvar DIFF_EQUAL = 0;\n\n\n/**\n * Find the differences between two texts.  
Simplifies the problem by stripping\n * any common prefix or suffix off the texts before diffing.\n * @param {string} text1 Old string to be diffed.\n * @param {string} text2 New string to be diffed.\n * @param {Int} cursor_pos Expected edit position in text1 (optional)\n * @return {Array} Array of diff tuples.\n */\nfunction diff_main(text1, text2, cursor_pos) {\n  // Check for equality (speedup).\n  if (text1 == text2) {\n    if (text1) {\n      return [[DIFF_EQUAL, text1]];\n    }\n    return [];\n  }\n\n  // Check cursor_pos within bounds\n  if (cursor_pos < 0 || text1.length < cursor_pos) {\n    cursor_pos = null;\n  }\n\n  // Trim off common prefix (speedup).\n  var commonlength = diff_commonPrefix(text1, text2);\n  var commonprefix = text1.substring(0, commonlength);\n  text1 = text1.substring(commonlength);\n  text2 = text2.substring(commonlength);\n\n  // Trim off common suffix (speedup).\n  commonlength = diff_commonSuffix(text1, text2);\n  var commonsuffix = text1.substring(text1.length - commonlength);\n  text1 = text1.substring(0, text1.length - commonlength);\n  text2 = text2.substring(0, text2.length - commonlength);\n\n  // Compute the diff on the middle block.\n  var diffs = diff_compute_(text1, text2);\n\n  // Restore the prefix and suffix.\n  if (commonprefix) {\n    diffs.unshift([DIFF_EQUAL, commonprefix]);\n  }\n  if (commonsuffix) {\n    diffs.push([DIFF_EQUAL, commonsuffix]);\n  }\n  diff_cleanupMerge(diffs);\n  if (cursor_pos != null) {\n    diffs = fix_cursor(diffs, cursor_pos);\n  }\n  return diffs;\n};\n\n\n/**\n * Find the differences between two texts.  
Assumes that the texts do not\n * have any common prefix or suffix.\n * @param {string} text1 Old string to be diffed.\n * @param {string} text2 New string to be diffed.\n * @return {Array} Array of diff tuples.\n */\nfunction diff_compute_(text1, text2) {\n  var diffs;\n\n  if (!text1) {\n    // Just add some text (speedup).\n    return [[DIFF_INSERT, text2]];\n  }\n\n  if (!text2) {\n    // Just delete some text (speedup).\n    return [[DIFF_DELETE, text1]];\n  }\n\n  var longtext = text1.length > text2.length ? text1 : text2;\n  var shorttext = text1.length > text2.length ? text2 : text1;\n  var i = longtext.indexOf(shorttext);\n  if (i != -1) {\n    // Shorter text is inside the longer text (speedup).\n    diffs = [[DIFF_INSERT, longtext.substring(0, i)],\n             [DIFF_EQUAL, shorttext],\n             [DIFF_INSERT, longtext.substring(i + shorttext.length)]];\n    // Swap insertions for deletions if diff is reversed.\n    if (text1.length > text2.length) {\n      diffs[0][0] = diffs[2][0] = DIFF_DELETE;\n    }\n    return diffs;\n  }\n\n  if (shorttext.length == 1) {\n    // Single character string.\n    // After the previous speedup, the character can't be an equality.\n    return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]];\n  }\n\n  // Check to see if the problem can be split in two.\n  var hm = diff_halfMatch_(text1, text2);\n  if (hm) {\n    // A half-match was found, sort out the return data.\n    var text1_a = hm[0];\n    var text1_b = hm[1];\n    var text2_a = hm[2];\n    var text2_b = hm[3];\n    var mid_common = hm[4];\n    // Send both pairs off for separate processing.\n    var diffs_a = diff_main(text1_a, text2_a);\n    var diffs_b = diff_main(text1_b, text2_b);\n    // Merge the results.\n    return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b);\n  }\n\n  return diff_bisect_(text1, text2);\n};\n\n\n/**\n * Find the 'middle snake' of a diff, split the problem in two\n * and return the recursively constructed diff.\n * See Myers 1986 
paper: An O(ND) Difference Algorithm and Its Variations.\n * @param {string} text1 Old string to be diffed.\n * @param {string} text2 New string to be diffed.\n * @return {Array} Array of diff tuples.\n * @private\n */\nfunction diff_bisect_(text1, text2) {\n  // Cache the text lengths to prevent multiple calls.\n  var text1_length = text1.length;\n  var text2_length = text2.length;\n  var max_d = Math.ceil((text1_length + text2_length) / 2);\n  var v_offset = max_d;\n  var v_length = 2 * max_d;\n  var v1 = new Array(v_length);\n  var v2 = new Array(v_length);\n  // Setting all elements to -1 is faster in Chrome & Firefox than mixing\n  // integers and undefined.\n  for (var x = 0; x < v_length; x++) {\n    v1[x] = -1;\n    v2[x] = -1;\n  }\n  v1[v_offset + 1] = 0;\n  v2[v_offset + 1] = 0;\n  var delta = text1_length - text2_length;\n  // If the total number of characters is odd, then the front path will collide\n  // with the reverse path.\n  var front = (delta % 2 != 0);\n  // Offsets for start and end of k loop.\n  // Prevents mapping of space beyond the grid.\n  var k1start = 0;\n  var k1end = 0;\n  var k2start = 0;\n  var k2end = 0;\n  for (var d = 0; d < max_d; d++) {\n    // Walk the front path one step.\n    for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) {\n      var k1_offset = v_offset + k1;\n      var x1;\n      if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) {\n        x1 = v1[k1_offset + 1];\n      } else {\n        x1 = v1[k1_offset - 1] + 1;\n      }\n      var y1 = x1 - k1;\n      while (x1 < text1_length && y1 < text2_length &&\n             text1.charAt(x1) == text2.charAt(y1)) {\n        x1++;\n        y1++;\n      }\n      v1[k1_offset] = x1;\n      if (x1 > text1_length) {\n        // Ran off the right of the graph.\n        k1end += 2;\n      } else if (y1 > text2_length) {\n        // Ran off the bottom of the graph.\n        k1start += 2;\n      } else if (front) {\n        var k2_offset = v_offset + delta - k1;\n  
      if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) {\n          // Mirror x2 onto top-left coordinate system.\n          var x2 = text1_length - v2[k2_offset];\n          if (x1 >= x2) {\n            // Overlap detected.\n            return diff_bisectSplit_(text1, text2, x1, y1);\n          }\n        }\n      }\n    }\n\n    // Walk the reverse path one step.\n    for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) {\n      var k2_offset = v_offset + k2;\n      var x2;\n      if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) {\n        x2 = v2[k2_offset + 1];\n      } else {\n        x2 = v2[k2_offset - 1] + 1;\n      }\n      var y2 = x2 - k2;\n      while (x2 < text1_length && y2 < text2_length &&\n             text1.charAt(text1_length - x2 - 1) ==\n             text2.charAt(text2_length - y2 - 1)) {\n        x2++;\n        y2++;\n      }\n      v2[k2_offset] = x2;\n      if (x2 > text1_length) {\n        // Ran off the left of the graph.\n        k2end += 2;\n      } else if (y2 > text2_length) {\n        // Ran off the top of the graph.\n        k2start += 2;\n      } else if (!front) {\n        var k1_offset = v_offset + delta - k2;\n        if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) {\n          var x1 = v1[k1_offset];\n          var y1 = v_offset + x1 - k1_offset;\n          // Mirror x2 onto top-left coordinate system.\n          x2 = text1_length - x2;\n          if (x1 >= x2) {\n            // Overlap detected.\n            return diff_bisectSplit_(text1, text2, x1, y1);\n          }\n        }\n      }\n    }\n  }\n  // Diff took too long and hit the deadline or\n  // number of diffs equals number of characters, no commonality at all.\n  return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]];\n};\n\n\n/**\n * Given the location of the 'middle snake', split the diff in two parts\n * and recurse.\n * @param {string} text1 Old string to be diffed.\n * @param {string} text2 New string to be 
diffed.\n * @param {number} x Index of split point in text1.\n * @param {number} y Index of split point in text2.\n * @return {Array} Array of diff tuples.\n */\nfunction diff_bisectSplit_(text1, text2, x, y) {\n  var text1a = text1.substring(0, x);\n  var text2a = text2.substring(0, y);\n  var text1b = text1.substring(x);\n  var text2b = text2.substring(y);\n\n  // Compute both diffs serially.\n  var diffs = diff_main(text1a, text2a);\n  var diffsb = diff_main(text1b, text2b);\n\n  return diffs.concat(diffsb);\n};\n\n\n/**\n * Determine the common prefix of two strings.\n * @param {string} text1 First string.\n * @param {string} text2 Second string.\n * @return {number} The number of characters common to the start of each\n *     string.\n */\nfunction diff_commonPrefix(text1, text2) {\n  // Quick check for common null cases.\n  if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) {\n    return 0;\n  }\n  // Binary search.\n  // Performance analysis: http://neil.fraser.name/news/2007/10/09/\n  var pointermin = 0;\n  var pointermax = Math.min(text1.length, text2.length);\n  var pointermid = pointermax;\n  var pointerstart = 0;\n  while (pointermin < pointermid) {\n    if (text1.substring(pointerstart, pointermid) ==\n        text2.substring(pointerstart, pointermid)) {\n      pointermin = pointermid;\n      pointerstart = pointermin;\n    } else {\n      pointermax = pointermid;\n    }\n    pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin);\n  }\n  return pointermid;\n};\n\n\n/**\n * Determine the common suffix of two strings.\n * @param {string} text1 First string.\n * @param {string} text2 Second string.\n * @return {number} The number of characters common to the end of each string.\n */\nfunction diff_commonSuffix(text1, text2) {\n  // Quick check for common null cases.\n  if (!text1 || !text2 ||\n      text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1)) {\n    return 0;\n  }\n  // Binary search.\n  // Performance analysis: 
http://neil.fraser.name/news/2007/10/09/\n  var pointermin = 0;\n  var pointermax = Math.min(text1.length, text2.length);\n  var pointermid = pointermax;\n  var pointerend = 0;\n  while (pointermin < pointermid) {\n    if (text1.substring(text1.length - pointermid, text1.length - pointerend) ==\n        text2.substring(text2.length - pointermid, text2.length - pointerend)) {\n      pointermin = pointermid;\n      pointerend = pointermin;\n    } else {\n      pointermax = pointermid;\n    }\n    pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin);\n  }\n  return pointermid;\n};\n\n\n/**\n * Do the two texts share a substring which is at least half the length of the\n * longer text?\n * This speedup can produce non-minimal diffs.\n * @param {string} text1 First string.\n * @param {string} text2 Second string.\n * @return {Array.<string>} Five element Array, containing the prefix of\n *     text1, the suffix of text1, the prefix of text2, the suffix of\n *     text2 and the common middle.  Or null if there was no match.\n */\nfunction diff_halfMatch_(text1, text2) {\n  var longtext = text1.length > text2.length ? text1 : text2;\n  var shorttext = text1.length > text2.length ? text2 : text1;\n  if (longtext.length < 4 || shorttext.length * 2 < longtext.length) {\n    return null;  // Pointless.\n  }\n\n  /**\n   * Does a substring of shorttext exist within longtext such that the substring\n   * is at least half the length of longtext?\n   * Closure, but does not reference any external variables.\n   * @param {string} longtext Longer string.\n   * @param {string} shorttext Shorter string.\n   * @param {number} i Start index of quarter length substring within longtext.\n   * @return {Array.<string>} Five element Array, containing the prefix of\n   *     longtext, the suffix of longtext, the prefix of shorttext, the suffix\n   *     of shorttext and the common middle.  
Or null if there was no match.\n   * @private\n   */\n  function diff_halfMatchI_(longtext, shorttext, i) {\n    // Start with a 1/4 length substring at position i as a seed.\n    var seed = longtext.substring(i, i + Math.floor(longtext.length / 4));\n    var j = -1;\n    var best_common = '';\n    var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b;\n    while ((j = shorttext.indexOf(seed, j + 1)) != -1) {\n      var prefixLength = diff_commonPrefix(longtext.substring(i),\n                                           shorttext.substring(j));\n      var suffixLength = diff_commonSuffix(longtext.substring(0, i),\n                                           shorttext.substring(0, j));\n      if (best_common.length < suffixLength + prefixLength) {\n        best_common = shorttext.substring(j - suffixLength, j) +\n            shorttext.substring(j, j + prefixLength);\n        best_longtext_a = longtext.substring(0, i - suffixLength);\n        best_longtext_b = longtext.substring(i + prefixLength);\n        best_shorttext_a = shorttext.substring(0, j - suffixLength);\n        best_shorttext_b = shorttext.substring(j + prefixLength);\n      }\n    }\n    if (best_common.length * 2 >= longtext.length) {\n      return [best_longtext_a, best_longtext_b,\n              best_shorttext_a, best_shorttext_b, best_common];\n    } else {\n      return null;\n    }\n  }\n\n  // First check if the second quarter is the seed for a half-match.\n  var hm1 = diff_halfMatchI_(longtext, shorttext,\n                             Math.ceil(longtext.length / 4));\n  // Check again based on the third quarter.\n  var hm2 = diff_halfMatchI_(longtext, shorttext,\n                             Math.ceil(longtext.length / 2));\n  var hm;\n  if (!hm1 && !hm2) {\n    return null;\n  } else if (!hm2) {\n    hm = hm1;\n  } else if (!hm1) {\n    hm = hm2;\n  } else {\n    // Both matched.  Select the longest.\n    hm = hm1[4].length > hm2[4].length ? 
hm1 : hm2;\n  }\n\n  // A half-match was found, sort out the return data.\n  var text1_a, text1_b, text2_a, text2_b;\n  if (text1.length > text2.length) {\n    text1_a = hm[0];\n    text1_b = hm[1];\n    text2_a = hm[2];\n    text2_b = hm[3];\n  } else {\n    text2_a = hm[0];\n    text2_b = hm[1];\n    text1_a = hm[2];\n    text1_b = hm[3];\n  }\n  var mid_common = hm[4];\n  return [text1_a, text1_b, text2_a, text2_b, mid_common];\n};\n\n\n/**\n * Reorder and merge like edit sections.  Merge equalities.\n * Any edit section can move as long as it doesn't cross an equality.\n * @param {Array} diffs Array of diff tuples.\n */\nfunction diff_cleanupMerge(diffs) {\n  diffs.push([DIFF_EQUAL, '']);  // Add a dummy entry at the end.\n  var pointer = 0;\n  var count_delete = 0;\n  var count_insert = 0;\n  var text_delete = '';\n  var text_insert = '';\n  var commonlength;\n  while (pointer < diffs.length) {\n    switch (diffs[pointer][0]) {\n      case DIFF_INSERT:\n        count_insert++;\n        text_insert += diffs[pointer][1];\n        pointer++;\n        break;\n      case DIFF_DELETE:\n        count_delete++;\n        text_delete += diffs[pointer][1];\n        pointer++;\n        break;\n      case DIFF_EQUAL:\n        // Upon reaching an equality, check for prior redundancies.\n        if (count_delete + count_insert > 1) {\n          if (count_delete !== 0 && count_insert !== 0) {\n            // Factor out any common prefixies.\n            commonlength = diff_commonPrefix(text_insert, text_delete);\n            if (commonlength !== 0) {\n              if ((pointer - count_delete - count_insert) > 0 &&\n                  diffs[pointer - count_delete - count_insert - 1][0] ==\n                  DIFF_EQUAL) {\n                diffs[pointer - count_delete - count_insert - 1][1] +=\n                    text_insert.substring(0, commonlength);\n              } else {\n                diffs.splice(0, 0, [DIFF_EQUAL,\n                                    
text_insert.substring(0, commonlength)]);\n                pointer++;\n              }\n              text_insert = text_insert.substring(commonlength);\n              text_delete = text_delete.substring(commonlength);\n            }\n            // Factor out any common suffixies.\n            commonlength = diff_commonSuffix(text_insert, text_delete);\n            if (commonlength !== 0) {\n              diffs[pointer][1] = text_insert.substring(text_insert.length -\n                  commonlength) + diffs[pointer][1];\n              text_insert = text_insert.substring(0, text_insert.length -\n                  commonlength);\n              text_delete = text_delete.substring(0, text_delete.length -\n                  commonlength);\n            }\n          }\n          // Delete the offending records and add the merged ones.\n          if (count_delete === 0) {\n            diffs.splice(pointer - count_insert,\n                count_delete + count_insert, [DIFF_INSERT, text_insert]);\n          } else if (count_insert === 0) {\n            diffs.splice(pointer - count_delete,\n                count_delete + count_insert, [DIFF_DELETE, text_delete]);\n          } else {\n            diffs.splice(pointer - count_delete - count_insert,\n                count_delete + count_insert, [DIFF_DELETE, text_delete],\n                [DIFF_INSERT, text_insert]);\n          }\n          pointer = pointer - count_delete - count_insert +\n                    (count_delete ? 1 : 0) + (count_insert ? 
1 : 0) + 1;\n        } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) {\n          // Merge this equality with the previous one.\n          diffs[pointer - 1][1] += diffs[pointer][1];\n          diffs.splice(pointer, 1);\n        } else {\n          pointer++;\n        }\n        count_insert = 0;\n        count_delete = 0;\n        text_delete = '';\n        text_insert = '';\n        break;\n    }\n  }\n  if (diffs[diffs.length - 1][1] === '') {\n    diffs.pop();  // Remove the dummy entry at the end.\n  }\n\n  // Second pass: look for single edits surrounded on both sides by equalities\n  // which can be shifted sideways to eliminate an equality.\n  // e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC\n  var changes = false;\n  pointer = 1;\n  // Intentionally ignore the first and last element (don't need checking).\n  while (pointer < diffs.length - 1) {\n    if (diffs[pointer - 1][0] == DIFF_EQUAL &&\n        diffs[pointer + 1][0] == DIFF_EQUAL) {\n      // This is a single edit surrounded by equalities.\n      if (diffs[pointer][1].substring(diffs[pointer][1].length -\n          diffs[pointer - 1][1].length) == diffs[pointer - 1][1]) {\n        // Shift the edit over the previous equality.\n        diffs[pointer][1] = diffs[pointer - 1][1] +\n            diffs[pointer][1].substring(0, diffs[pointer][1].length -\n                                        diffs[pointer - 1][1].length);\n        diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1];\n        diffs.splice(pointer - 1, 1);\n        changes = true;\n      } else if (diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) ==\n          diffs[pointer + 1][1]) {\n        // Shift the edit over the next equality.\n        diffs[pointer - 1][1] += diffs[pointer + 1][1];\n        diffs[pointer][1] =\n            diffs[pointer][1].substring(diffs[pointer + 1][1].length) +\n            diffs[pointer + 1][1];\n        diffs.splice(pointer + 1, 1);\n        changes = true;\n      }\n 
   }\n    pointer++;\n  }\n  // If shifts were made, the diff needs reordering and another shift sweep.\n  if (changes) {\n    diff_cleanupMerge(diffs);\n  }\n};\n\n\n/*\n * Modify a diff such that the cursor position points to the start of a change:\n * E.g.\n *   cursor_normalize_diff([[DIFF_EQUAL, 'abc']], 1)\n *     => [1, [[DIFF_EQUAL, 'a'], [DIFF_EQUAL, 'bc']]]\n *   cursor_normalize_diff([[DIFF_INSERT, 'new'], [DIFF_DELETE, 'xyz']], 2)\n *     => [2, [[DIFF_INSERT, 'new'], [DIFF_DELETE, 'xy'], [DIFF_DELETE, 'z']]]\n *\n * @param {Array} diffs Array of diff tuples\n * @param {Int} cursor_pos Suggested edit position. Must not be out of bounds!\n * @return {Array} A tuple [cursor location in the modified diff, modified diff]\n */\nfunction cursor_normalize_diff (diffs, cursor_pos) {\n  if (cursor_pos === 0) {\n    return [DIFF_EQUAL, diffs];\n  }\n  for (var current_pos = 0, i = 0; i < diffs.length; i++) {\n    var d = diffs[i];\n    if (d[0] === DIFF_DELETE || d[0] === DIFF_EQUAL) {\n      var next_pos = current_pos + d[1].length;\n      if (cursor_pos === next_pos) {\n        return [i + 1, diffs];\n      } else if (cursor_pos < next_pos) {\n        // copy to prevent side effects\n        diffs = diffs.slice();\n        // split d into two diff changes\n        var split_pos = cursor_pos - current_pos;\n        var d_left = [d[0], d[1].slice(0, split_pos)];\n        var d_right = [d[0], d[1].slice(split_pos)];\n        diffs.splice(i, 1, d_left, d_right);\n        return [i + 1, diffs];\n      } else {\n        current_pos = next_pos;\n      }\n    }\n  }\n  throw new Error('cursor_pos is out of bounds!')\n}\n\n/*\n * Modify a diff such that the edit position is \"shifted\" to the proposed edit location (cursor_position).\n *\n * Case 1)\n *   Check if a naive shift is possible:\n *     [0, X], [ 1, Y] -> [ 1, Y], [0, X]    (if X + Y === Y + X)\n *     [0, X], [-1, Y] -> [-1, Y], [0, X]    (if X + Y === Y + X) - holds same result\n * Case 2)\n *   Check if 
the following shifts are possible:\n *     [0, 'pre'], [ 1, 'prefix'] -> [ 1, 'pre'], [0, 'pre'], [ 1, 'fix']\n *     [0, 'pre'], [-1, 'prefix'] -> [-1, 'pre'], [0, 'pre'], [-1, 'fix']\n *         ^            ^\n *         d          d_next\n *\n * @param {Array} diffs Array of diff tuples\n * @param {Int} cursor_pos Suggested edit position. Must not be out of bounds!\n * @return {Array} Array of diff tuples\n */\nfunction fix_cursor (diffs, cursor_pos) {\n  var norm = cursor_normalize_diff(diffs, cursor_pos);\n  var ndiffs = norm[1];\n  var cursor_pointer = norm[0];\n  var d = ndiffs[cursor_pointer];\n  var d_next = ndiffs[cursor_pointer + 1];\n\n  if (d == null) {\n    // Text was deleted from end of original string,\n    // cursor is now out of bounds in new string\n    return diffs;\n  } else if (d[0] !== DIFF_EQUAL) {\n    // A modification happened at the cursor location.\n    // This is the expected outcome, so we can return the original diff.\n    return diffs;\n  } else {\n    if (d_next != null && d[1] + d_next[1] === d_next[1] + d[1]) {\n      // Case 1)\n      // It is possible to perform a naive shift\n      ndiffs.splice(cursor_pointer, 2, d_next, d)\n      return merge_tuples(ndiffs, cursor_pointer, 2)\n    } else if (d_next != null && d_next[1].indexOf(d[1]) === 0) {\n      // Case 2)\n      // d[1] is a prefix of d_next[1]\n      // We can assume that d_next[0] !== 0, since d[0] === 0\n      // Shift edit locations..\n      ndiffs.splice(cursor_pointer, 2, [d_next[0], d[1]], [0, d[1]]);\n      var suffix = d_next[1].slice(d[1].length);\n      if (suffix.length > 0) {\n        ndiffs.splice(cursor_pointer + 2, 0, [d_next[0], suffix]);\n      }\n      return merge_tuples(ndiffs, cursor_pointer, 3)\n    } else {\n      // Not possible to perform any modification\n      return diffs;\n    }\n  }\n\n}\n\n/*\n * Try to merge tuples with their neigbors in a given range.\n * E.g. 
[0, 'a'], [0, 'b'] -> [0, 'ab']\n *\n * @param {Array} diffs Array of diff tuples.\n * @param {Int} start Position of the first element to merge (diffs[start] is also merged with diffs[start - 1]).\n * @param {Int} length Number of consecutive elements to check.\n * @return {Array} Array of merged diff tuples.\n */\nfunction merge_tuples (diffs, start, length) {\n  // Check from (start-1) to (start+length).\n  for (var i = start + length - 1; i >= 0 && i >= start - 1; i--) {\n    if (i + 1 < diffs.length) {\n      var left_d = diffs[i];\n      var right_d = diffs[i+1];\n      if (left_d[0] === right_d[1]) {\n        diffs.splice(i, 2, [left_d[0], left_d[1] + right_d[1]]);\n      }\n    }\n  }\n  return diffs;\n}\n\n\nmodule.exports = require.diff = {\n    diff_convert_to_my_format,\n    diff_main\n}\n"
  },
  {
    "path": "util/require.js",
    "content": "// These 8 lines let browsers import modules with require().\nfunction require (thing) {\n    thing = thing.split('/')\n    thing = thing[thing.length-1]\n    if (thing.slice(-3) === '.js')\n        thing = thing.slice(0,-3)\n    console.assert(require[thing], `require(\"${thing}\") failed because <script src=\"${thing}\"> is not working.`)\n    return require[thing]\n}\n//global = self\nmodule = {exports: {}}\n"
  },
  {
    "path": "util/utilities.js",
    "content": "// ===============================================\n//\n//   Utilities\n//\n\nis_browser = typeof process !== 'object' || typeof global !== 'object'\nterminal_width = _ => (!is_browser && process.stdout.columns) || 80\nshow_protocol_errors = false\nnlogf = (protocol, from, symbol, to, msg) => {\n    let stringy = JSON.stringify(msg, function(k, v) {\n        if (k === 'method')\n            return undefined;\n        return v;\n    });\n    nlog(\n        `${protocol}: ${from} ${symbol} ${to}`,\n        msg.method.toUpperCase().padEnd(7),\n        stringy.substr(0, terminal_width() - 30)\n    )\n}\n\n// dict() is an alternative to {}.  It creates a clean hash table without any\n// pre-existing keys, like .constructor or .prototype that are built into\n// Javascript Objects.\nvar dict = () => Object.create(null)\n\nmodule.exports = require.utilities = {\n    dict: dict,\n    random_id: () => Math.random().toString(36).substr(2),\n\n    // Maps a key to a set of values.\n    //\n    // If the value is not hashable, you can provide its hash using k2.\n    one_to_many: () => {\n        var data   = dict()\n        var counts = dict()\n        return {\n            get (k) { return Object.values(data[k] || dict()) },\n            add (k1, k2, value) {\n                assert(value, \"one-to-many.add() requires three parameters\")\n                if (  data[k1] === undefined)   data[k1] = dict()\n                if (counts[k1] === undefined) counts[k1] = 0\n                if (!data[k1][k2]) counts[k1]++\n                data[k1][k2] = value\n            },\n            delete (k, k2) { delete data[k][k2]; counts[k]-- },\n            delete_all (k) { delete data[k]; delete counts[k] },\n            has (k, k2)    { return data[k] && k2 in data[k] },\n            count (k)      { return counts[k] || 0},\n            toString ()    { return JSON.stringify({data, counts}, null, '    ') }\n        }\n    },\n    deep_equals,\n    has_keep_alive: (origin, 
key) => {\n        var s = origin.subscribed_keys && origin.subscribed_keys[key]\n        return s && ((s.we_requested && s.we_requested.keep_alive)\n                ||\n                (s.they_requested && s.they_requested.keep_alive))\n    },\n    parse_patch\n}\n\nif (is_browser)\n    assert = console.assert\nelse\n    assert = require('assert')\n// assert = function () {\n//     if (!arguments[0]) {\n//         console.trace.apply(console, ['-Assert-', ...[...arguments].slice(1)])\n//         // if (this.process)\n//         //     process.exit()\n//         // else\n//             throw 'Bad'\n//     }\n// }\n\nif (typeof show_debug === 'undefined')\n    // This defaults to false\n    show_debug = false\nlog = function () {\n    if (show_debug)\n        return console.log.apply(console, arguments)\n}\nprint_network = !is_browser && process.argv.includes(\"--network\")\nnlog = function () {\n    if (show_debug || print_network)\n        return console.log.apply(console, arguments)\n}\n\nfunction deep_equals(a, b) {\n    if (typeof(a) != 'object' || typeof(b) != 'object' || a == null || b == null) return a == b\n    if (Array.isArray(a)) {\n        if (!Array.isArray(b)) return false\n        if (a.length != b.length) return false\n        for (var i = 0; i < a.length; i++)\n            if (!deep_equals(a[i], b[i])) return false\n        return true\n    }\n    var ak = Object.keys(a).sort()\n    var bk = Object.keys(b).sort()\n    if (ak.length != bk.length) return false\n    for (var k of ak)\n        if (!deep_equals(a[k], b[k])) return false\n    return true\n}\n\nfunction parse_patch(patch) {\n    var ret = { path : [] }\n    var re = /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|'(\\\\'|[^'])*'|\"(\\\\\"|[^\"])*\")\\]|\\s*=\\s*([\\s\\S]*)/g\n    var m\n    while (m = re.exec(patch)) {\n        if (m[1])\n            ret.delete = true\n        else if (m[2])\n            ret.path.push(m[2])\n        else if (m[3] && m[5])\n            
ret.slice = [\n                JSON.parse(m[4]),\n                JSON.parse(m[5].substr(1))\n            ]\n        else if (m[3])\n            ret.path.push(JSON.parse(m[3]))\n        else if (m[8]) {\n            // What is this case for?  Can we have an example?\n            ret.value = JSON.parse(m[8])\n            recurse(ret.value)\n            function recurse(x) {\n                if (x && typeof(x) == 'object') {\n                    if (x instanceof Array) {\n                        for (var i = 0; i < x.length; i++) recurse(x[i])\n                    } else {\n                        if (Object.keys(x).find(k => k == 'type' && x[k] == 'location')) {\n                            x.id = Math.random().toString(36).slice(2)\n\n                            ret.annotations = ret.annotations || {}\n                            var path = parse_patch(x.path).path\n                            ret.annotations[x.id] = {\n                                path: path.slice(0, path.length - 1),\n                                pos: path[path.length - 1]\n                            }\n                        } else for (let k of Object.keys(x)) recurse(x[k])\n                    }\n                }\n            }\n        }\n    }\n    return ret\n}\n\n\n// ===============================================\n//\n//   Random number generator\n//\n//     This customized random number generator can be seeded, to\n//     produce deterministic results.\n//\n//     That way, we can reproduce test-cases, and debug them.\n//\n{\n    // These two functions are added by Glittle.\n    Math.create_rand = function (seed) {\n        if (typeof(seed) == 'string') {\n            var t = new MersenneTwister(0)\n            var a = []\n            for (var i = 0; i < seed.length; i++)\n                a[i] = seed.charCodeAt(i)\n            t.init_by_array(a, a.length)\n        } else if (Array.isArray(seed)) {\n            var t = new MersenneTwister(0)\n            t.init_by_array(seed, 
seed.length)\n        } else if (typeof(seed) == 'number') {\n            var t = new MersenneTwister(seed)\n        } else {\n            var t = new MersenneTwister()\n        }\n        function func() {\n            return t.random()\n        }\n        func.get_state = () => {\n            var a = t.mt.slice(0)\n            a.push(t.mti)\n            return JSON.stringify(a)\n        }\n        func.set_state = s => {\n            var a = JSON.parse(s)\n            t.mt = a.slice(0, a.length - 1)\n            t.mti = a[a.length - 1]\n        }\n        return func\n    }\n      \n    Math.randomSeed = function (seed) {\n        Math.random = Math.create_rand(seed)\n    }\n    // Those previous two functions added by Glittle\n\n\n    /* The following piece of code is an implementation of MersenneTwister object\n       taken from https://gist.github.com/banksean/300494, with one method \n       xor_array(array, size) added.\n    */\n\n    /*\n      I've wrapped Makoto Matsumoto and Takuji Nishimura's code in a namespace\n      so it's better encapsulated. 
Now you can have multiple random number generators\n      and they won't stomp all over eachother's state.\n\n      If you want to use this as a substitute for Math.random(), use the random()\n      method like so:\n\n      var m = new MersenneTwister();\n      var randomNumber = m.random();\n\n      You can also call the other genrand_{foo}() methods on the instance.\n\n      If you want to use a specific seed in order to get a repeatable random\n      sequence, pass an integer into the constructor:\n\n      var m = new MersenneTwister(123);\n\n      and that will always produce the same random sequence.\n\n      Sean McCullough (banksean@gmail.com)\n    */\n\n    /* \n       A C-program for MT19937, with initialization improved 2002/1/26.\n       Coded by Takuji Nishimura and Makoto Matsumoto.\n\n       Before using, initialize the state by using init_genrand(seed)  \n       or init_by_array(init_key, key_length).\n\n       Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n       All rights reserved.                          \n\n       Redistribution and use in source and binary forms, with or without\n       modification, are permitted provided that the following conditions\n       are met:\n\n       1. Redistributions of source code must retain the above copyright\n       notice, this list of conditions and the following disclaimer.\n\n       2. Redistributions in binary form must reproduce the above copyright\n       notice, this list of conditions and the following disclaimer in the\n       documentation and/or other materials provided with the distribution.\n\n       3. 
The names of its contributors may not be used to endorse or promote \n       products derived from this software without specific prior written \n       permission.\n\n       THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n       \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n       LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n       A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n       CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n       EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n       PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n       PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n       LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n       NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n       SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n       Any feedback is very welcome.\n       http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n       email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n    */\n\n    var MersenneTwister = function(seed) {\n        if (seed == undefined) {\n            seed = new Date().getTime();\n        } \n        /* Period parameters */  \n        this.N = 624;\n        this.M = 397;\n        this.MATRIX_A = 0x9908b0df;   /* constant vector a */\n        this.UPPER_MASK = 0x80000000; /* most significant w-r bits */\n        this.LOWER_MASK = 0x7fffffff; /* least significant r bits */\n\n        this.mt = new Array(this.N); /* the array for the state vector */\n        this.mti=this.N+1; /* mti==N+1 means mt[N] is not initialized */\n\n        this.init_genrand(seed);\n    }  \n\n    /* initializes mt[N] with a seed */\n    MersenneTwister.prototype.init_genrand = function(s) {\n        this.mt[0] = s >>> 0;\n        for (this.mti=1; this.mti<this.N; this.mti++) 
{\n            var s = this.mt[this.mti-1] ^ (this.mt[this.mti-1] >>> 30);\n            this.mt[this.mti] = (((((s & 0xffff0000) >>> 16) * 1812433253) << 16) + (s & 0x0000ffff) * 1812433253)\n                + this.mti;\n            /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n            /* In the previous versions, MSBs of the seed affect   */\n            /* only MSBs of the array mt[].                        */\n            /* 2002/01/09 modified by Makoto Matsumoto             */\n            this.mt[this.mti] >>>= 0;\n            /* for >32 bit machines */\n        }\n    }\n\n    /* initialize by an array with array-length */\n    /* init_key is the array for initializing keys */\n    /* key_length is its length */\n    /* slight change for C++, 2004/2/26 */\n    MersenneTwister.prototype.init_by_array = function(init_key, key_length) {\n        var i, j, k;\n        this.init_genrand(19650218);\n        i=1; j=0;\n        k = (this.N>key_length ? this.N : key_length);\n        for (; k; k--) {\n            var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30)\n            this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1664525) << 16) + ((s & 0x0000ffff) * 1664525)))\n                + init_key[j] + j; /* non linear */\n            this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */\n            i++; j++;\n            if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }\n            if (j>=key_length) j=0;\n        }\n        for (k=this.N-1; k; k--) {\n            var s = this.mt[i-1] ^ (this.mt[i-1] >>> 30);\n            this.mt[i] = (this.mt[i] ^ (((((s & 0xffff0000) >>> 16) * 1566083941) << 16) + (s & 0x0000ffff) * 1566083941))\n                - i; /* non linear */\n            this.mt[i] >>>= 0; /* for WORDSIZE > 32 machines */\n            i++;\n            if (i>=this.N) { this.mt[0] = this.mt[this.N-1]; i=1; }\n        }\n\n        this.mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */ \n    }\n\n    /* XORs the 
mt array with a given array xor_key of length key_length */\n    MersenneTwister.prototype.xor_array = function(xor_key, key_length) {\n        var i, j;\n        j = 0;\n        for (i = 0; i < this.N; i++) {\n            this.mt[i] ^= xor_key[j];\n            this.mt[i] >>>= 0;\n            j++;\n            if (j >= key_length) j = 0;\n        }\n    }\n\n    /* generates a random number on [0,0xffffffff]-interval */\n    MersenneTwister.prototype.genrand_int32 = function() {\n        var y;\n        var mag01 = new Array(0x0, this.MATRIX_A);\n        /* mag01[x] = x * MATRIX_A  for x=0,1 */\n\n        if (this.mti >= this.N) { /* generate N words at one time */\n            var kk;\n\n            if (this.mti == this.N+1)   /* if init_genrand() has not been called, */\n                this.init_genrand(5489); /* a default initial seed is used */\n\n            for (kk=0;kk<this.N-this.M;kk++) {\n                y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);\n                this.mt[kk] = this.mt[kk+this.M] ^ (y >>> 1) ^ mag01[y & 0x1];\n            }\n            for (;kk<this.N-1;kk++) {\n                y = (this.mt[kk]&this.UPPER_MASK)|(this.mt[kk+1]&this.LOWER_MASK);\n                this.mt[kk] = this.mt[kk+(this.M-this.N)] ^ (y >>> 1) ^ mag01[y & 0x1];\n            }\n            y = (this.mt[this.N-1]&this.UPPER_MASK)|(this.mt[0]&this.LOWER_MASK);\n            this.mt[this.N-1] = this.mt[this.M-1] ^ (y >>> 1) ^ mag01[y & 0x1];\n\n            this.mti = 0;\n        }\n\n        y = this.mt[this.mti++];\n\n        /* Tempering */\n        y ^= (y >>> 11);\n        y ^= (y << 7) & 0x9d2c5680;\n        y ^= (y << 15) & 0xefc60000;\n        y ^= (y >>> 18);\n\n        return y >>> 0;\n    }\n\n    /* generates a random number on [0,0x7fffffff]-interval */\n    MersenneTwister.prototype.genrand_int31 = function() {\n        return (this.genrand_int32()>>>1);\n    }\n\n    /* generates a random number on [0,1]-real-interval */\n    
MersenneTwister.prototype.genrand_real1 = function() {\n        return this.genrand_int32()*(1.0/4294967295.0); \n        /* divided by 2^32-1 */ \n    }\n\n    /* generates a random number on [0,1)-real-interval */\n    MersenneTwister.prototype.random = function() {\n        return this.genrand_int32()*(1.0/4294967296.0); \n        /* divided by 2^32 */\n    }\n\n    /* generates a random number on (0,1)-real-interval */\n    MersenneTwister.prototype.genrand_real3 = function() {\n        return (this.genrand_int32() + 0.5)*(1.0/4294967296.0); \n        /* divided by 2^32 */\n    }\n\n    /* generates a random number on [0,1) with 53-bit resolution*/\n    MersenneTwister.prototype.genrand_res53 = function() { \n        var a=this.genrand_int32()>>>5, b=this.genrand_int32()>>>6; \n        return(a*67108864.0+b)*(1.0/9007199254740992.0); \n    } \n    /* These real versions are due to Isaku Wada, 2002/01/09 added */\n}"
  },
  {
    "path": "yarnball/server.js",
    "content": "\n// require('child_process').execSync('cp ./log-old.txt ./log.txt', {stdio: 'inherit'})\n// require('child_process').execSync('rm ./log.txt', {stdio: 'inherit'})\n// require('child_process').execSync('ls', {stdio: 'inherit'})\n// throw 'stop'\n\nvar port = 60003\n\nvar looms = {}\nvar get_loom = key => looms[key] || (looms[key] = create_loom_server({id: 'server'}))\n\nvar spawns = {}\n\nvar logfile = './yarnball.txt'\nvar wal_stream = require('fs').createWriteStream(logfile, {flags: 'a'})\nvar wal_append = (key, msg) => wal_stream.write(JSON.stringify({key, msg}) + '\\n')\n// if (require('fs').existsSync(logfile)) {\n//     let lines = ('' + require('fs').readFileSync(logfile)).match(/.+/g)\n\n//     console.log({lines})\n\n//     if (lines) {\n//         looms = JSON.parse(lines.shift())\n//         for (let L of Object.values(looms)) create_loom(L, () => {})\n\n//         for (let line of lines) {\n//             if (!line) continue\n//             let x = JSON.parse(line)\n//             let L = looms[x.key] || (looms[x.key] = create_loom({id: 'server'}, () => {}))\n\n//             if (x.msg.cmd == 'disconnect') L.disconnect(x.msg.peer)\n//             else L.receive(x.msg)\n//         }\n//         for (let L of Object.values(looms)) {\n//             for (let peer of Object.keys(L.peers)) L.disconnect(peer)\n//         }\n\n//         for (let L of Object.values(looms)) create_loom_server(L)\n//     }\n// }\n\nwal_compactor()\nasync function wal_compactor() {\n    process.stdout.write(`<`)\n\n    var filename = `./log_${Math.random().toString(36)}`\n    await require('fs/promises').writeFile(filename, JSON.stringify(looms) + '\\n')\n\n    wal_stream.end()\n    require('fs').renameSync(filename, logfile)\n\n    wal_stream = require('fs').createWriteStream(logfile, {flags: 'a'})    \n\n    process.stdout.write(`>`)\n\n    setTimeout(wal_compactor, 1000 * 60)\n}\n\nvar server = require('https').createServer({\n    key: 
require('fs').readFileSync('./privkey.pem'),\n    cert: require('fs').readFileSync('./fullchain.pem')\n}, async function (req, res) {\n\n    console.log({method: req.method, url: req.url})\n\n    res.statusCode = 200\n    res.setHeader('Access-Control-Allow-Origin', '*')\n    res.setHeader('Access-Control-Allow-Headers', '*')\n    res.setHeader('Access-Control-Allow-Methods', '*')\n    res.end('ok')\n})\n\nvar wss = new (require('ws').Server)({server})\nwss.on('connection', function connection(ws, req) {\n\n    console.log(`new connection! ${req.url}`)\n\n    var key = req.url.slice(1)\n    get_loom(key).on_conn(ws, key)\n})\n\nserver.listen(port)\nconsole.log(`listening on port ${port}`)\n\nfunction create_loom_server(L) {\n    var conns = {}\n\n    var L = create_loom(L, (to, x) => {\n\n        console.log(JSON.stringify({sending: to, data: x}, null, '    '))\n\n        conns[to].send(JSON.stringify(x))\n    })\n\n    L.on_conn = (ws, key) => {\n        ws.on('message', x => {\n\n            console.log(`RECV: ${x}`)\n\n            x = JSON.parse(x)\n            wal_append(key, x)\n            if (x.cmd == 'get' && !L.peers[x.peer]) {\n                ws.my_peer = x.peer\n                ws.my_conn = x.conn\n                conns[ws.my_peer] = ws\n            }\n            try {\n                L.receive(x)\n            } catch (e) {\n                ws.send(JSON.stringify({cmd: 'error'}))\n            }\n        })\n        ws.on('close', () => {\n            if (ws.my_peer) {\n                wal_append(key, {cmd: 'disconnect', peer: ws.my_peer})\n                delete conns[ws.my_peer]\n                L.disconnect(ws.my_peer)\n            }\n        })\n    }\n\n    return L\n}\n\nfunction create_loom(L, send) {\n    L = L ?? {}\n\n    if (!L.id) L.id = Math.random().toString(36).slice(2)\n    if (!L.next_seq) L.next_seq = 0\n\n    L.S = L.S ?? null\n    L.T = L.T ?? {}\n    L.current_version = L.current_version ?? {}\n\n    L.peers = L.peers ?? 
{}\n    L.version_cache = L.version_cache ?? {}\n    L.fissures = L.fissures ?? {}\n    L.acked_boundary = L.acked_boundary ?? {}\n    L.unack_boundary = L.unack_boundary ?? {}\n    L.acks_in_process = L.acks_in_process ?? {}\n\n    var orig_send = send\n    send = (to, msg) => {\n        orig_send(to, {peer: L.id, conn: L.peers[to], ...msg})\n    }\n\n    L.get = peer => {\n        send(peer, {cmd: 'get', conn: Math.random().toString(36).slice(2)})\n    }\n\n    L.forget = peer => {\n        send(peer, {cmd: 'forget'})\n    }\n\n    L.disconnect = peer => {\n        if (!L.peers[peer]) return\n        var conn = L.peers[peer]\n        delete L.peers[peer]\n\n        var versions = {}\n        var ack_versions = ancestors(L.acked_boundary)\n        Object.keys(L.T).forEach(v => {\n            if (!ack_versions[v] || L.acked_boundary[v]) versions[v] = true\n        })\n\n        L.receive({cmd: 'fissure', fissure: {a: L.id, b: peer, conn, versions, time: Date.now()}})\n    }\n\n    L.set = (...patches) => {\n        L.receive({cmd: 'set', version: `${L.next_seq++}@${L.id}`, parents: {...L.current_version}, patches})\n    }\n\n    L.read = (is_anc) => {\n        if (!is_anc) is_anc = () => true\n        else if (typeof(is_anc) == 'string') {\n            var ancs = x.ancestors({[is_anc]: true})\n            is_anc = v => ancs[v]\n        } else if (typeof(is_anc) == 'object') {\n            var ancs = x.ancestors(is_anc)\n            is_anc = v => ancs[v]\n        }\n\n        return rec_read(L.S)\n        function rec_read(x) {\n            if (x && typeof(x) == 'object') {\n                if (x.t == 'lit') return JSON.parse(JSON.stringify(x.S))\n                if (x.t == 'val') return rec_read(space_dag_get(x.S, 0, is_anc))\n                if (x.t == 'obj') {\n                    var o = {}\n                    Object.entries(x.S).forEach(([k, v]) => {\n                        var x = rec_read(v)\n                        if (x != null) o[k] = x\n                 
   })\n                    return o\n                }\n                if (x.t == 'arr') {\n                    var a = []\n                    traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n                        if (!deleted) node.elems.forEach((e) => a.push(rec_read(e)))\n                    }, true)\n                    return a\n                }\n                if (x.t == 'str') {\n                    var s = []\n                    traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n                        if (!deleted) s.push(node.elems)\n                    }, true)\n                    return s.join('')\n                }\n                throw 'bad'\n            } return x\n        }\n    }\n\n    L.receive = ({cmd, version, parents, patches, fissure, versions, fissures, unack_boundary, min_leaves, peer, conn}) => {\n        if (cmd == 'get' || cmd == 'get_back') {\n            if (L.peers[peer]) throw 'bad'\n            L.peers[peer] = conn\n\n            if (cmd == 'get') send(peer, {cmd: 'get_back'})\n            send(peer, {cmd: 'welcome',\n                versions: generate_braid(parents),\n                fissures: Object.values(L.fissures),\n                parents: parents && Object.keys(parents).length ? 
get_leaves(ancestors(parents, true)) : {}\n            })\n        } else if (cmd == 'forget') {\n            if (!L.peers[peer]) throw 'bad'\n            delete L.peers[peer]\n            L.acks_in_process = {}\n        } else if (cmd == 'set') {\n            for (p in parents) if (!L.T[p]) throw 'bad'\n\n            if (!peer || !L.T[version]) {\n                add_version(version, parents, patches)\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'set', version, parents, patches})\n\n                L.acks_in_process[version] = {origin: peer, count: Object.keys(L.peers).length}\n                if (peer) L.acks_in_process[version].count--\n            } else if (L.acks_in_process[version]) L.acks_in_process[version].count--\n\n            check_ack_count(version)\n        } else if (cmd == 'ack1') {\n            if (L.acks_in_process[version]) {\n                L.acks_in_process[version].count--\n                check_ack_count(version)\n            }\n        } else if (cmd == 'ack2') {\n            if (!L.T[version]) return\n            if (ancestors(L.unack_boundary)[version]) return\n            if (ancestors(L.acked_boundary)[version]) return\n            add_full_ack_leaf(version)\n            for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'ack2', version})\n        } else if (cmd == 'fissure') {\n            var key = fissure.a + ':' + fissure.b + ':' + fissure.conn\n            if (!L.fissures[key]) {\n                L.fissures[key] = fissure\n                L.acks_in_process = {}\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'fissure', fissure})\n                if (fissure.b == L.id) L.receive({cmd: 'fissure', fissure: {...fissure, a: L.id, b: fissure.a}})\n            }\n        } else if (cmd == 'welcome') {\n            var versions_to_add = {}\n            versions.forEach(v => versions_to_add[v.version] = v.parents)\n            versions.forEach(v => {\n     
           if (L.T[v.version]) {\n                    remove_ancestors(v.version)\n                    function remove_ancestors(v) {\n                        if (versions_to_add[v]) {\n                            Object.keys(versions_to_add[v]).forEach(remove_ancestors)\n                            delete versions_to_add[v]\n                        }\n                    }\n                }\n            })\n\n            var send_error = () => send(peer, {cmd: 'error'})\n\n            var added_versions = []\n            for (var v of versions) {\n                if (versions_to_add[v.version]) {\n                    if (!Object.keys(v.parents).every(p => L.T[p])) return send_error()\n\n                    add_version(v.version, v.parents, v.patches, v.sort_keys)\n                    added_versions.push(v)\n                }\n            }\n\n            if (((min_leaves && Object.keys(min_leaves).some(k => !L.T[k])) || (unack_boundary && Object.keys(unack_boundary).some(k => !L.T[k])))) return send_error()\n\n            var new_fissures = []\n            var gen_fissures = []\n            fissures.forEach(f => {\n                var key = f.a + ':' + f.b + ':' + f.conn\n                if (!L.fissures[key]) {\n\n                    new_fissures.push(f)\n                    L.fissures[key] = f\n\n                    if (f.b == L.id) gen_fissures.push({...f, a: L.id, b: f.a})\n                }\n            })\n\n            if (!unack_boundary) unack_boundary = {...L.current_version}\n\n            var our_conn_versions = ancestors(L.T, L.unack_boundary)\n            var new_conn_versions = ancestors(L.T, unack_boundary)\n\n            Object.keys(L.unack_boundary).forEach(x => {\n                if (new_conn_versions[x] && !unack_boundary[x])\n                    delete L.unack_boundary[x]\n            })\n            Object.keys(unack_boundary).forEach(x => {\n                if (!our_conn_versions[x]) L.unack_boundary[x] = true\n            })\n            \n 
           if (!min_leaves) {\n                if (versions.length === 0 && (!parents || Object.keys(parents).length === 0))\n                    min_leaves = {...L.current_version}\n                else {\n                    min_leaves = parents ? {...parents} : {}\n                    versions.forEach(v => {\n                        if (!versions_to_add[v.version]) min_leaves[v.version] = true\n                    })\n                    min_leaves = get_leaves(ancestors(min_leaves, true))\n                }\n            }\n\n            var min_versions = ancestors(min_leaves)\n            var ack_versions = ancestors(L.acked_boundary)\n            Object.keys(L.acked_boundary).forEach(x => {\n                if (!min_versions[x]) delete L.acked_boundary[x]\n            })\n            Object.keys(min_leaves).forEach(x => {\n                if (ack_versions[x]) L.acked_boundary[x] = true\n            })\n\n            L.acks_in_process = {}\n\n            if (added_versions.length > 0 || new_fissures.length > 0) {\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'welcome', key, versions: added_versions, unack_boundary,min_leaves, fissures: new_fissures})\n            }\n\n            gen_fissures.forEach(f => L.receive({cmd: 'fissure', fissure: f}))\n        }\n    }\n\n    var is_lit = x => !x || typeof(x) != 'object' || x.t == 'lit'\n    var get_lit = x => (x && typeof(x) == 'object' && x.t == 'lit') ? x.S : x\n    let make_lit = x => (x && typeof(x) == 'object') ? 
{t: 'lit', S: x} : x\n\n    function prune() {\n        var unremovable = {}\n\n        Object.entries(L.fissures).forEach(x => {\n            var other_key = x[1].b + ':' + x[1].a + ':' + x[1].conn\n            var other = L.fissures[other_key]\n            if (other) {\n                delete L.fissures[x[0]]\n                delete L.fissures[other_key]\n            }\n        })\n\n        if (L.fissure_lifetime != null) {\n            var now = Date.now()\n            Object.entries(L.fissures).forEach(([k, f]) => {\n                if (f.time == null) f.time = now\n                if (f.time <= now - L.fissure_lifetime) {\n                    delete L.fissures[k]\n                }\n            })\n        }\n\n        var keep_us = {}\n\n        Object.values(L.fissures).forEach(f => {\n            Object.keys(f.versions).forEach(v => keep_us[v] = true)\n        })\n\n        var acked = ancestors(L.T, L.acked_boundary)\n        Object.keys(L.T).forEach(x => {\n            if (!acked[x] || L.acked_boundary[x]) keep_us[x] = true\n        })\n\n        var children = {}\n        Object.entries(L.T).forEach(([v, parents]) => {\n            Object.keys(parents).forEach(parent => {\n                if (!children[parent]) children[parent] = {}\n                children[parent][v] = true\n            })\n        })\n\n        var to_bubble = {}\n        var bubble_tops = {}\n        var bubble_bottoms = {}\n        \n        function mark_bubble(bottom, top, tag) {\n            if (!to_bubble[bottom]) {\n                to_bubble[bottom] = tag\n                if (bottom !== top) Object.keys(L.T[bottom]).forEach(p => mark_bubble(p, top, tag))\n            }\n        }\n        \n        var done = {}\n        function f(cur) {\n            if (!L.T[cur]) return\n            if (done[cur]) return\n            done[cur] = true\n            \n            if (!to_bubble[cur] || bubble_tops[cur]) {\n                var bubble_top = find_one_bubble(cur)\n                
if (bubble_top) {\n                    delete to_bubble[cur]\n                    mark_bubble(cur, bubble_top, bubble_tops[cur] || cur)\n                    bubble_tops[bubble_top] = bubble_tops[cur] || cur\n                    bubble_bottoms[bubble_tops[cur] || cur] = bubble_top\n                }\n            }\n\n            Object.keys(L.T[cur]).forEach(f)\n        }\n        Object.keys(L.current_version).forEach(f)\n\n        function find_one_bubble(cur) {\n            var seen = {[cur]: true}\n            var q = Object.keys(L.T[cur])\n            var expecting = Object.fromEntries(q.map(x => [x, true]))\n            while (q.length) {\n                cur = q.pop()\n                if (!L.T[cur]) return null\n                if (keep_us[cur]) return null\n                if (Object.keys(children[cur]).every(c => seen[c])) {\n                    seen[cur] = true\n                    delete expecting[cur]\n                    if (!Object.keys(expecting).length) return cur\n                    \n                    Object.keys(L.T[cur]).forEach(p => {\n                        q.push(p)\n                        expecting[p] = true\n                    })\n                }\n            }\n            return null\n        }\n\n        to_bubble = Object.fromEntries(Object.entries(to_bubble).map(\n            ([v, bub]) => [v, [bub, bubble_bottoms[bub]]]\n        ))\n\n        apply_bubbles(to_bubble)\n    }\n\n    function add_full_ack_leaf(version) {\n        var marks = {}\n        function f(v) {\n            if (!marks[v]) {\n                marks[v] = true\n                delete L.unack_boundary[v]\n                delete L.acked_boundary[v]\n                delete L.acks_in_process[v]\n                Object.keys(L.T[v]).forEach(f)\n            }\n        }\n        f(version)\n\n        L.acked_boundary[version] = true\n        prune(L)\n    }\n\n    function check_ack_count(version) {\n        if (L.acks_in_process[version] && 
L.acks_in_process[version].count == 0) {\n            if (L.acks_in_process[version].origin) {\n                send(L.acks_in_process[version].origin, {cmd: 'ack1', version})\n            } else {\n                add_full_ack_leaf(version)\n                for (let p of Object.keys(L.peers)) send(p, {cmd: 'ack2', version})\n            }\n        }\n    }\n\n    function generate_braid(versions) {\n        var anc = versions && Object.keys(versions).length ? ancestors(versions, true) : {}\n        var is_anc = x => anc[x]\n\n        if (Object.keys(L.T).length === 0) return []\n\n        return Object.entries(L.version_cache).filter(x => !is_anc(x[0])).map(([version, set_message]) => {\n            return L.version_cache[version] = set_message || generate_set_message(version)\n        })\n\n        function generate_set_message(version) {\n            if (!Object.keys(L.T[version]).length) {\n                return {\n                    version,\n                    parents: {},\n                    patches: [` = ${JSON.stringify(L.read(v => v == version))}`]\n                }\n            }\n        \n            var is_lit = x => !x || typeof(x) !== 'object' || x.t === 'lit'\n            var get_lit = x => (x && typeof(x) === 'object' && x.t === 'lit') ? 
x.S : x\n        \n            var ancs = ancestors({[version]: true})\n            delete ancs[version]\n            var is_anc = x => ancs[x]\n            var path = []\n            var patches = []\n            var sort_keys = {}\n            recurse(L.S)\n            function recurse(x) {\n                if (is_lit(x)) {\n                } else if (x.t === 'val') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        if (s[2].length) {\n                            patches.push(`${path.join('')} = ${JSON.stringify(s[2][0])}`)\n                            if (s[3]) sort_keys[patches.length - 1] = s[3]\n                        }\n                    })\n                    traverse_space_dag(x.S, is_anc, node => {\n                        node.elems.forEach(recurse)\n                    })\n                } else if (x.t === 'arr') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`)\n                        if (s[3]) sort_keys[patches.length - 1] = s[3]\n                    })\n                    var i = 0\n                    traverse_space_dag(x.S, is_anc, node => {\n                        node.elems.forEach(e => {\n                            path.push(`[${i++}]`)\n                            recurse(e)\n                            path.pop()\n                        })\n                    })\n                } else if (x.t === 'obj') {\n                    Object.entries(x.S).forEach(e => {\n                        path.push('[' + JSON.stringify(e[0]) + ']')\n                        recurse(e[1])\n                        path.pop()\n                    })\n                } else if (x.t === 'str') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = 
${JSON.stringify(s[2])}`)\n                        if (s[3]) sort_keys[patches.length - 1] = s[3]\n                    })\n                }\n            }\n        \n            return {\n                version,\n                parents: {...L.T[version]},\n                patches,\n                sort_keys\n            }\n        }\n    }\n    L.generate_braid = generate_braid\n\n    function space_dag_generate_braid(S, version, is_anc) {\n        var splices = []\n\n        function add_ins(offset, ins, sort_key, end_cap) {\n            if (typeof(ins) !== 'string')\n                ins = ins.map(x => L.read(x, () => false))\n            if (splices.length > 0) {\n                var prev = splices[splices.length - 1]\n                if (prev[0] + prev[1] === offset && !end_cap && (prev[4] === 'i' || (prev[4] === 'r' && prev[1] === 0))) {\n                    prev[2] = prev[2].concat(ins)\n                    return\n                }\n            }\n            splices.push([offset, 0, ins, sort_key, end_cap ? 
'r' : 'i'])\n        }\n\n        function add_del(offset, del, ins) {\n            if (splices.length > 0) {\n                var prev = splices[splices.length - 1]\n                if (prev[0] + prev[1] === offset && prev[4] !== 'i') {\n                    prev[1] += del\n                    return\n                }\n            }\n            splices.push([offset, del, ins, null, 'd'])\n        }\n        \n        var offset = 0\n        function helper(node, _version, end_cap) {\n            if (_version === version) {\n                add_ins(offset, node.elems.slice(0), node.sort_key, end_cap)\n            } else if (node.deleted_by[version] && node.elems.length > 0) {\n                add_del(offset, node.elems.length, node.elems.slice(0, 0))\n            }\n            \n            if ((!_version || is_anc(_version)) && !Object.keys(node.deleted_by).some(is_anc)) {\n                offset += node.elems.length\n            }\n            \n            node.nexts.forEach(next => helper(next, next.version, node.end_cap))\n            if (node.next) helper(node.next, _version)\n        }\n        helper(S, null)\n        splices.forEach(s => {\n            // if we have replaces with 0 deletes,\n            // make them have at least 1 delete..\n            // this can happen when there are multiple replaces of the same text,\n            // and our code above will associate those deletes with only one of them\n            if (s[4] === 'r' && s[1] === 0) s[1] = 1\n        })\n        return splices\n    }\n\n    function apply_bubbles(to_bubble) {\n        function recurse(x) {\n            if (is_lit(x)) return x\n            if (x.t == 'val') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                traverse_space_dag(x.S, () => true, node => {\n                    node.elems = node.elems.slice(0, 1).map(recurse)\n                }, true)\n                if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.length == 1 && 
is_lit(x.S.elems[0])) return x.S.elems[0]\n                return x\n            }\n            if (x.t == 'arr') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                traverse_space_dag(x.S, () => true, node => {\n                    node.elems = node.elems.map(recurse)\n                }, true)\n                if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.every(is_lit) && !Object.keys(x.S.deleted_by).length) return {t: 'lit', S: x.S.elems.map(get_lit)}\n                return x\n            }\n            if (x.t == 'obj') {\n                Object.entries(x.S).forEach(e => {\n                    var y = x.S[e[0]] = recurse(e[1])\n                    if (y == null) delete x.S[e[0]]\n                })\n                if (Object.values(x.S).every(is_lit)) {\n                    var o = {}\n                    Object.entries(x.S).forEach(e => o[e[0]] = get_lit(e[1]))\n                    return {t: 'lit', S: o}\n                }\n                return x\n            }\n            if (x.t == 'str') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                if (x.S.nexts.length == 0 && !x.S.next && !Object.keys(x.S.deleted_by).length) return x.S.elems\n                return x\n            }\n        }\n        L.S = recurse(L.S)\n\n        Object.entries(to_bubble).forEach(([version, bubble]) => {\n            if (version === bubble[1])\n                L.T[bubble[0]] = L.T[bubble[1]]\n            if (version !== bubble[0]) {\n                delete L.T[version]\n                delete L.version_cache[version]\n            } else L.version_cache[version] = null\n        })\n\n        var leaves = Object.keys(L.current_version)\n        var acked_boundary = Object.keys(L.acked_boundary)\n        var fiss = Object.keys(L.fissures)\n        if (leaves.length == 1 && acked_boundary.length == 1\n            && leaves[0] == acked_boundary[0] && fiss.length == 0) {\n            L.T = { [leaves[0]]: {} }\n            L.S = 
make_lit(L.read())\n        }\n    }\n\n    function add_version(version, parents, patches, sort_keys, is_anc) {\n        if (L.T[version]) return\n\n        L.T[version] = {...parents}\n\n        L.version_cache[version] = JSON.parse(JSON.stringify({\n            version, parents, patches, sort_keys\n        }))\n\n        Object.keys(parents).forEach(k => {\n            if (L.current_version[k])\n                delete L.current_version[k]\n        })\n        L.current_version[version] = true\n        \n        if (!sort_keys) sort_keys = {}\n        \n        if (!Object.keys(parents).length) {\n            var parse = parse_patch(patches[0])\n            L.S = make_lit(parse.value)\n            return\n        }\n        \n        if (!is_anc) {\n            if (parents == L.current_version) {\n                is_anc = _version => _version != version\n            } else {\n                var ancs = ancestors(parents)\n                is_anc = _version => ancs[_version]\n            }\n        }\n        \n        patches.forEach((patch, i) => {\n            var sort_key = sort_keys[i]\n            var parse = parse_patch(patch)\n            var cur = resolve_path(parse)\n            if (!parse.slice) {\n                if (cur.t != 'val') throw 'bad'\n                var len = space_dag_length(cur.S, is_anc)\n                space_dag_add_version(cur.S, version, [[0, len, [parse.delete ? 
null : make_lit(parse.value)]]], sort_key, is_anc)\n            } else {\n                if (typeof parse.value === 'string' && cur.t !== 'str')\n                    throw `Cannot splice string ${JSON.stringify(parse.value)} into non-string`\n                if (parse.value instanceof Array && cur.t !== 'arr')\n                    throw `Cannot splice array ${JSON.stringify(parse.value)} into non-array`\n                if (parse.value instanceof Array)\n                    parse.value = parse.value.map(x => make_lit(x))\n\n                var r0 = parse.slice[0]\n                var r1 = parse.slice[1]\n                if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {\n                    let len = space_dag_length(cur.S, is_anc)\n                    if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0\n                    if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1\n                }\n\n                space_dag_add_version(cur.S, version, [[r0, r1 - r0, parse.value]], sort_key, is_anc)\n            }\n        })\n\n        function resolve_path(parse) {\n            var cur = L.S\n            if (!cur || typeof(cur) != 'object' || cur.t == 'lit')\n                cur = L.S = {t: 'val', S: create_space_dag_node(null, [cur])}\n            var prev_S = null\n            var prev_i = 0\n            for (var i=0; i<parse.path.length; i++) {\n                var key = parse.path[i]\n                if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n                if (cur.t == 'lit') {\n                    var new_cur = {}\n                    if (cur.S instanceof Array) {\n                        new_cur.t = 'arr'\n                        new_cur.S = create_space_dag_node(null, cur.S.map(x => make_lit(x)))\n                    } else {\n                        if (typeof(cur.S) != 'object') throw 'bad'\n                        new_cur.t = 'obj'\n                        new_cur.S = {}\n                        
Object.entries(cur.S).forEach(e => new_cur.S[e[0]] = make_lit(e[1]))\n                    }\n                    cur = new_cur\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                }\n                if (cur.t == 'obj') {\n                    let x = cur.S[key]\n                    if (!x || typeof(x) != 'object' || x.t == 'lit')\n                        x = cur.S[key] = {t: 'val', S: create_space_dag_node(null, [x == null ? null : x])}\n                    cur = x\n                } else if (i == parse.path.length - 1 && !parse.slice) {\n                    parse.slice = [key, key + 1]\n                    parse.value = (cur.t == 'str') ? parse.value : [parse.value]\n                } else if (cur.t == 'arr') {\n                    cur = space_dag_get(prev_S = cur.S, prev_i = key, is_anc)\n                } else throw 'bad'\n            }\n            if (parse.slice) {\n                if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n                if (typeof(cur) == 'string') {\n                    cur = {t: 'str', S: create_space_dag_node(null, cur)}\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                } else if (cur.t == 'lit') {\n                    if (!(cur.S instanceof Array)) throw 'bad'\n                    cur = {t: 'arr', S: create_space_dag_node(null, cur.S.map(x => make_lit(x)))}\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                }\n            }\n            return cur\n        }\n    }\n\n    function ancestors(versions, ignore_nonexistent) {\n        var result = {}\n        function recurse(version) {\n            if (result[version]) return\n            if (!L.T[version]) {\n                if (ignore_nonexistent) return\n                throw `The version ${version} no existo`\n            }\n            result[version] = true\n            Object.keys(L.T[version]).forEach(recurse)\n        }\n        
Object.keys(versions).forEach(recurse)\n        return result\n    }\n\n    L.ancestors = ancestors\n\n    function get_leaves(versions) {\n        var leaves = {...versions}\n        Object.keys(versions).forEach(v => {\n            Object.keys(L.T[v]).forEach(p => delete leaves[p])\n        })\n        return leaves\n    }\n\n    if (!Object.keys(L.T).length) L.set('= \"i am empty\"')\n\n    return L\n}\n\nfunction create_space_dag_node(version, elems, end_cap, sort_key) {\n    return {\n        version : version,\n        sort_key : sort_key,\n        elems : elems,\n        deleted_by : {},\n        end_cap : end_cap,\n        nexts : [],\n        next : null\n    }\n}\n\nfunction space_dag_apply_bubbles(S, to_bubble) {\n\n    traverse_space_dag(S, () => true, node => {\n        if (to_bubble[node.version] && to_bubble[node.version][0] != node.version) {\n            if (!node.sort_key) node.sort_key = node.version\n            node.version = to_bubble[node.version][0]\n        }\n\n        for (var x of Object.keys(node.deleted_by)) {\n            if (to_bubble[x]) {\n                delete node.deleted_by[x]\n                node.deleted_by[to_bubble[x][0]] = true\n            }\n        }\n    }, true)\n\n    function set_nnnext(node, next) {\n        while (node.next) node = node.next\n        node.next = next\n    }\n\n    do_line(S, S.version)\n    function do_line(node, version) {\n        var prev = null\n        while (node) {\n            if (node.nexts[0] && node.nexts[0].version == version) {\n                for (let i = 0; i < node.nexts.length; i++) {\n                    delete node.nexts[i].version\n                    delete node.nexts[i].sort_key\n                    set_nnnext(node.nexts[i], i + 1 < node.nexts.length ? 
node.nexts[i + 1] : node.next)\n                }\n                node.next = node.nexts[0]\n                node.nexts = []\n            }\n\n            if (node.deleted_by[version]) {\n                node.elems = node.elems.slice(0, 0)\n                node.deleted_by = {}\n                if (prev) { node = prev; continue }\n            }\n\n            var next = node.next\n\n            if (!node.nexts.length && next && (!node.elems.length || !next.elems.length || (Object.keys(node.deleted_by).every(x => next.deleted_by[x]) && Object.keys(next.deleted_by).every(x => node.deleted_by[x])))) {\n                if (!node.elems.length) node.deleted_by = next.deleted_by\n                node.elems = node.elems.concat(next.elems)\n                node.end_cap = next.end_cap\n                node.nexts = next.nexts\n                node.next = next.next\n                continue\n            }\n\n            for (let n of node.nexts) do_line(n, n.version)\n\n            prev = node\n            node = next\n        }\n    }\n}\n\nfunction space_dag_get(S, i, is_anc) {\n    var ret = null\n    var offset = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            ret = node.elems[i - offset]\n            return false\n        }\n        offset += node.elems.length\n    })\n    return ret\n}\n\nfunction space_dag_set(S, i, v, is_anc) {\n    var offset = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            node.elems[i - offset] = v\n            return false\n        }\n        offset += node.elems.length\n    })\n}\n\nfunction space_dag_length(S, is_anc) {\n    var count = 0\n    traverse_space_dag(S, is_anc ? 
is_anc : () => true, node => {\n        count += node.elems.length\n    })\n    return count\n}\n\nfunction space_dag_break_node(node, x, end_cap, new_next) {\n    var tail = create_space_dag_node(null, node.elems.slice(x), node.end_cap)\n    Object.assign(tail.deleted_by, node.deleted_by)\n    tail.nexts = node.nexts\n    tail.next = node.next\n    \n    node.elems = node.elems.slice(0, x)\n    node.end_cap = end_cap\n    node.nexts = new_next ? [new_next] : []\n    node.next = tail\n\n    return tail\n}\n\nfunction space_dag_add_version(S, version, splices, sort_key, is_anc) {\n    \n    function add_to_nexts(nexts, to) {\n        var i = binarySearch(nexts, function (x) {\n            if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1\n            if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1\n            return 0\n        })\n        nexts.splice(i, 0, to)\n    }\n    \n    var si = 0\n    var delete_up_to = 0\n    \n    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n        var s = splices[si]\n        if (!s) return false\n        \n        if (deleted) {\n            if (s[1] == 0 && s[0] == offset) {\n                if (node.elems.length == 0 && !node.end_cap && has_nexts) return\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n                if (node.elems.length == 0 && !node.end_cap)\n                    add_to_nexts(node.nexts, new_node)\n                else\n                    space_dag_break_node(node, 0, undefined, new_node)\n                si++\n            }\n            return            \n        }\n        \n        if (s[1] == 0) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d > 0) return\n            if (d == 0 && !node.end_cap && has_nexts) return\n            var new_node = create_space_dag_node(version, s[2], null, sort_key)\n            if (d == 0 && !node.end_cap) {\n                
add_to_nexts(node.nexts, new_node)\n            } else {\n                space_dag_break_node(node, s[0] - offset, undefined, new_node)\n            }\n            si++\n            return\n        }\n        \n        if (delete_up_to <= offset) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d >= 0) return\n            delete_up_to = s[0] + s[1]\n            \n            if (s[2]) {\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n                if (s[0] == offset && prev && prev.end_cap) {\n                    add_to_nexts(prev.nexts, new_node)\n                } else {\n                    space_dag_break_node(node, s[0] - offset, true, new_node)\n                    return\n                }\n            } else {\n                if (s[0] == offset) {\n                } else {\n                    space_dag_break_node(node, s[0] - offset)\n                    return\n                }\n            }\n        }\n        \n        if (delete_up_to > offset) {\n            if (delete_up_to <= offset + node.elems.length) {\n                if (delete_up_to < offset + node.elems.length) {\n                    space_dag_break_node(node, delete_up_to - offset)\n                }\n                si++\n            }\n            node.deleted_by[version] = true\n            return\n        }\n    }\n    \n    var f = is_anc\n    var exit_early = {}\n    var offset = 0\n    function traverse(node, prev, version) {\n        var has_nexts = node.nexts.find(next => f(next.version))\n        var deleted = Object.keys(node.deleted_by).some(version => f(version))\n        if (process_patch(node, offset, has_nexts, prev, version, deleted) == false)\n            throw exit_early\n        if (!deleted) {\n            offset += node.elems.length\n        }\n        for (var next of node.nexts)\n            if (f(next.version)) traverse(next, null, next.version)\n        if (node.next) traverse(node.next, node, 
version)\n    }\n    try {\n        if (!S) debugger\n        traverse(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n    \n}\n\nfunction traverse_space_dag(S, f, cb, view_deleted, tail_cb) {\n    var exit_early = {}\n    var offset = 0\n    function helper(node, prev, version) {\n        var has_nexts = node.nexts.find(next => f(next.version))\n        var deleted = Object.keys(node.deleted_by).some(version => f(version))\n        if (view_deleted || !deleted) {\n            if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n                throw exit_early\n            offset += node.elems.length\n        }\n        for (var next of node.nexts)\n            if (f(next.version)) helper(next, null, next.version)\n        if (node.next) helper(node.next, node, version)\n        else if (tail_cb) tail_cb(node)\n    }\n    try {\n        helper(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n}\n\nfunction parse_patch(patch) {\n    var ret = { path : [] }\n    var re = /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|'(\\\\'|[^'])*'|\"(\\\\\"|[^\"])*\")\\]|\\s*=\\s*([\\s\\S]*)/g\n    var m\n    while (m = re.exec(patch)) {\n        if (m[1]) ret.delete = true\n        else if (m[2]) ret.path.push(m[2])\n        else if (m[3] && m[5]) ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))]\n        else if (m[3]) ret.path.push(JSON.parse(m[3]))\n        else if (m[8]) ret.value = JSON.parse(m[8])\n    }\n    return ret\n}\n\n// modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\nfunction binarySearch(ar, compare_fn) {\n    var m = 0;\n    var n = ar.length - 1;\n    while (m <= n) {\n        var k = (n + m) >> 1;\n        var cmp = compare_fn(ar[k]);\n        if (cmp > 0) {\n            m = k + 1;\n        } else if(cmp < 0) {\n            n = k - 1;\n        } else {\n            return k;\n        }\n    }\n    return m;\n}\n"
  },
  {
    "path": "yarnball/yarnball.html",
    "content": "\n<script src=\"https://bloop.monster/yarnball.js\"></script>\n<body></body>\n<script>\n\nvar yb = create_yarnball_client('wss://test.bloop.monster:60003/')\n\nvar first_time = true\nvar cb = x => {\n\n    console.log('bloop = ' + JSON.stringify(x))\n\n    if (first_time) {\n        first_time = false\n        yb.set('bloop', '[0:0] = \"A\"')\n        yb.forget('bloop', cb)\n    }\n}\nyb.get('bloop', cb)\n\n</script>\n"
  },
  {
    "path": "yarnball/yarnball.js",
    "content": "\nconsole.log('yarnball 0.001')\n\nfunction create_yarnball_client(base_url) {\n    var self = {}\n    var conns = {}\n\n    self.get = async (key, cb) => {\n        if (!conns[key]) conns[key] = {cbs: []}\n        conns[key].cbs.push(cb)\n        if (!conns[key].loom) {\n            conns[key].load_promise = new Promise(done => conns[key].load_done = done)\n            conns[key].loom = create_loom_client(null, base_url + key, L => {\n                var x = L.read()\n                conns[key].last_val = x\n                conns[key].load_done()\n                for (let c of conns[key].cbs) c(x)\n            })\n        }\n        await conns[key].load_promise\n        return conns[key]?.last_val\n    }\n\n    self.forget = (key, cb) => {\n        if (!conns[key]) return\n        var i = conns[key].cbs.findIndex(x => x == cb)\n        if (i >= 0) {\n            conns[key].cbs.splice(i, 1)\n            if (!conns[key].cbs.length) {\n                conns[key].loom.close()\n                delete conns[key]\n            }\n        }\n    }\n\n    self.set = (key, ...patches) => {\n        if (conns[key]?.last_val != undefined) {\n            conns[key].loom.set(...patches)\n\n            var x = conns[key].loom.read()\n            for (let c of conns[key].cbs) c(x)\n        } else throw 'can not yet set'\n    }\n\n    return self\n}\n\nfunction create_loom_client(L, url, on_change) {\n    var ws = null\n    L = create_loom(L, (to, x) => {\n        ws.readyState == 1 && ws.send(JSON.stringify(x))\n    })\n    L.close = () => {\n        L.forget()\n        ws.onopen = () => {}\n        ws.onmessage = () => {}\n        ws.onclose = () => {}\n        ws.close()\n    }\n    connect()\n    function connect() {\n        ws = new WebSocket(url)\n        ws.onopen = () => {\n            console.log('CONNECTED!')\n            L.get()\n        }\n        ws.onmessage = x => {\n            console.log(`RECV: ${x.data}`)\n\n            x = 
JSON.parse(x.data)\n\n            if (x.cmd == 'error') {\n                alert('trouble syncing.. note you cannot sync to the same doc twice on the same machine')\n                ws.onclose = () => {}\n                ws.close()\n                return\n            }\n                            \n            var y = L.receive(x)\n            if (!ws.my_peer) ws.my_peer = x.peer\n            if (x.cmd == 'welcome' || x.cmd == 'set') on_change(L, y)\n        }\n        ws.onclose = () => {\n            if (ws.my_peer) L.disconnect(ws.my_peer)\n            setTimeout(connect, 3000)\n        }\n    }\n    return L\n}\n\nfunction create_loom(L, send) {\n    L = L ?? {}\n\n    if (!L.id) L.id = Math.random().toString(36).slice(2)\n    if (!L.next_seq) L.next_seq = 0\n\n    L.S = L.S ?? null\n    L.T = L.T ?? {}\n    L.current_version = L.current_version ?? {}\n\n    L.peers = L.peers ?? {}\n    L.version_cache = L.version_cache ?? {}\n    L.fissures = L.fissures ?? {}\n    L.acked_boundary = L.acked_boundary ?? {}\n    L.unack_boundary = L.unack_boundary ?? {}\n    L.acks_in_process = L.acks_in_process ?? 
{}\n\n    var orig_send = send\n    send = (to, msg) => {\n        orig_send(to, {peer: L.id, conn: L.peers[to], ...msg})\n    }\n\n    L.get = peer => {\n        send(peer, {cmd: 'get', conn: Math.random().toString(36).slice(2)})\n    }\n\n    L.forget = peer => {\n        send(peer, {cmd: 'forget'})\n    }\n\n    L.disconnect = peer => {\n        if (!L.peers[peer]) return\n        var conn = L.peers[peer]\n        delete L.peers[peer]\n\n        var versions = {}\n        var ack_versions = ancestors(L.acked_boundary)\n        Object.keys(L.T).forEach(v => {\n            if (!ack_versions[v] || L.acked_boundary[v]) versions[v] = true\n        })\n\n        L.receive({cmd: 'fissure', fissure: {a: L.id, b: peer, conn, versions, time: Date.now()}})\n    }\n\n    L.set = (...patches) => {\n        L.receive({cmd: 'set', version: `${L.next_seq++}@${L.id}`, parents: {...L.current_version}, patches})\n    }\n\n    L.read = (is_anc) => {\n        if (!is_anc) is_anc = () => true\n        else if (typeof(is_anc) == 'string') {\n            var ancs = x.ancestors({[is_anc]: true})\n            is_anc = v => ancs[v]\n        } else if (typeof(is_anc) == 'object') {\n            var ancs = x.ancestors(is_anc)\n            is_anc = v => ancs[v]\n        }\n\n        return rec_read(L.S)\n        function rec_read(x) {\n            if (x && typeof(x) == 'object') {\n                if (x.t == 'lit') return JSON.parse(JSON.stringify(x.S))\n                if (x.t == 'val') return rec_read(space_dag_get(x.S, 0, is_anc))\n                if (x.t == 'obj') {\n                    var o = {}\n                    Object.entries(x.S).forEach(([k, v]) => {\n                        var x = rec_read(v)\n                        if (x != null) o[k] = x\n                    })\n                    return o\n                }\n                if (x.t == 'arr') {\n                    var a = []\n                    traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n      
                  if (!deleted) node.elems.forEach((e) => a.push(rec_read(e)))\n                    }, true)\n                    return a\n                }\n                if (x.t == 'str') {\n                    var s = []\n                    traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => {\n                        if (!deleted) s.push(node.elems)\n                    }, true)\n                    return s.join('')\n                }\n                throw 'bad'\n            } return x\n        }\n    }\n\n    L.receive = ({cmd, version, parents, patches, fissure, versions, fissures, unack_boundary, min_leaves, peer, conn}) => {\n        if (cmd == 'get' || cmd == 'get_back') {\n            if (L.peers[peer]) throw 'bad'\n            L.peers[peer] = conn\n\n            if (cmd == 'get') send(peer, {cmd: 'get_back'})\n            send(peer, {cmd: 'welcome',\n                versions: generate_braid(parents),\n                fissures: Object.values(L.fissures),\n                parents: parents && Object.keys(parents).length ? 
get_leaves(ancestors(parents, true)) : {}\n            })\n        } else if (cmd == 'forget') {\n            if (!L.peers[peer]) throw 'bad'\n            delete L.peers[peer]\n            L.acks_in_process = {}\n        } else if (cmd == 'set') {\n            for (p in parents) if (!L.T[p]) return send(peer, {cmd: 'error'})\n\n            if (!peer || !L.T[version]) {\n                var rebased_splices = add_version(version, parents, patches)\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'set', version, parents, patches})\n\n                L.acks_in_process[version] = {origin: peer, count: Object.keys(L.peers).length}\n                if (peer) L.acks_in_process[version].count--\n            } else if (L.acks_in_process[version]) L.acks_in_process[version].count--\n\n            check_ack_count(version)\n            return rebased_splices\n        } else if (cmd == 'ack1') {\n            if (L.acks_in_process[version]) {\n                L.acks_in_process[version].count--\n                check_ack_count(version)\n            }\n        } else if (cmd == 'ack2') {\n            if (!L.T[version]) return\n            if (ancestors(L.unack_boundary)[version]) return\n            if (ancestors(L.acked_boundary)[version]) return\n            add_full_ack_leaf(version)\n            for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'ack2', version})\n        } else if (cmd == 'fissure') {\n            var key = fissure.a + ':' + fissure.b + ':' + fissure.conn\n            if (!L.fissures[key]) {\n                L.fissures[key] = fissure\n                L.acks_in_process = {}\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'fissure', fissure})\n                if (fissure.b == L.id) L.receive({cmd: 'fissure', fissure: {...fissure, a: L.id, b: fissure.a}})\n            }\n        } else if (cmd == 'welcome') {\n            var versions_to_add = {}\n            versions.forEach(v => 
versions_to_add[v.version] = v.parents)\n            versions.forEach(v => {\n                if (L.T[v.version]) {\n                    remove_ancestors(v.version)\n                    function remove_ancestors(v) {\n                        if (versions_to_add[v]) {\n                            Object.keys(versions_to_add[v]).forEach(remove_ancestors)\n                            delete versions_to_add[v]\n                        }\n                    }\n                }\n            })\n\n            var send_error = () => send(peer, {cmd: 'error'})\n\n            var added_versions = []\n            for (var v of versions) {\n                if (versions_to_add[v.version]) {\n                    if (!Object.keys(v.parents).every(p => L.T[p])) return send_error()\n\n                    add_version(v.version, v.parents, v.patches, v.sort_keys)\n                    added_versions.push(v)\n                }\n            }\n\n            if (((min_leaves && Object.keys(min_leaves).some(k => !L.T[k])) || (unack_boundary && Object.keys(unack_boundary).some(k => !L.T[k])))) return send_error()\n\n            var new_fissures = []\n            var gen_fissures = []\n            fissures.forEach(f => {\n                var key = f.a + ':' + f.b + ':' + f.conn\n                if (!L.fissures[key]) {\n\n                    new_fissures.push(f)\n                    L.fissures[key] = f\n\n                    if (f.b == L.id) gen_fissures.push({...f, a: L.id, b: f.a})\n                }\n            })\n\n            if (!unack_boundary) unack_boundary = {...L.current_version}\n\n            var our_conn_versions = ancestors(L.T, L.unack_boundary)\n            var new_conn_versions = ancestors(L.T, unack_boundary)\n\n            Object.keys(L.unack_boundary).forEach(x => {\n                if (new_conn_versions[x] && !unack_boundary[x])\n                    delete L.unack_boundary[x]\n            })\n            Object.keys(unack_boundary).forEach(x => {\n                if 
(!our_conn_versions[x]) L.unack_boundary[x] = true\n            })\n            \n            if (!min_leaves) {\n                if (versions.length === 0 && (!parents || Object.keys(parents).length === 0))\n                    min_leaves = {...L.current_version}\n                else {\n                    min_leaves = parents ? {...parents} : {}\n                    versions.forEach(v => {\n                        if (!versions_to_add[v.version]) min_leaves[v.version] = true\n                    })\n                    min_leaves = get_leaves(ancestors(min_leaves, true))\n                }\n            }\n\n            var min_versions = ancestors(min_leaves)\n            var ack_versions = ancestors(L.acked_boundary)\n            Object.keys(L.acked_boundary).forEach(x => {\n                if (!min_versions[x]) delete L.acked_boundary[x]\n            })\n            Object.keys(min_leaves).forEach(x => {\n                if (ack_versions[x]) L.acked_boundary[x] = true\n            })\n\n            L.acks_in_process = {}\n\n            if (added_versions.length > 0 || new_fissures.length > 0) {\n                for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'welcome', key, versions: added_versions, unack_boundary,min_leaves, fissures: new_fissures})\n            }\n\n            gen_fissures.forEach(f => L.receive({cmd: 'fissure', fissure: f}))\n        }\n    }\n\n    var is_lit = x => !x || typeof(x) != 'object' || x.t == 'lit'\n    var get_lit = x => (x && typeof(x) == 'object' && x.t == 'lit') ? x.S : x\n    let make_lit = x => (x && typeof(x) == 'object') ? 
{t: 'lit', S: x} : x\n\n    function prune() {\n        var unremovable = {}\n\n        Object.entries(L.fissures).forEach(x => {\n            var other_key = x[1].b + ':' + x[1].a + ':' + x[1].conn\n            var other = L.fissures[other_key]\n            if (other) {\n                delete L.fissures[x[0]]\n                delete L.fissures[other_key]\n            }\n        })\n\n        if (L.fissure_lifetime != null) {\n            var now = Date.now()\n            Object.entries(L.fissures).forEach(([k, f]) => {\n                if (f.time == null) f.time = now\n                if (f.time <= now - L.fissure_lifetime) {\n                    delete L.fissures[k]\n                }\n            })\n        }\n\n        var keep_us = {}\n\n        Object.values(L.fissures).forEach(f => {\n            Object.keys(f.versions).forEach(v => keep_us[v] = true)\n        })\n\n        var acked = ancestors(L.T, L.acked_boundary)\n        Object.keys(L.T).forEach(x => {\n            if (!acked[x] || L.acked_boundary[x]) keep_us[x] = true\n        })\n\n        var children = {}\n        Object.entries(L.T).forEach(([v, parents]) => {\n            Object.keys(parents).forEach(parent => {\n                if (!children[parent]) children[parent] = {}\n                children[parent][v] = true\n            })\n        })\n\n        var to_bubble = {}\n        var bubble_tops = {}\n        var bubble_bottoms = {}\n        \n        function mark_bubble(bottom, top, tag) {\n            if (!to_bubble[bottom]) {\n                to_bubble[bottom] = tag\n                if (bottom !== top) Object.keys(L.T[bottom]).forEach(p => mark_bubble(p, top, tag))\n            }\n        }\n        \n        var done = {}\n        function f(cur) {\n            if (!L.T[cur]) return\n            if (done[cur]) return\n            done[cur] = true\n            \n            if (!to_bubble[cur] || bubble_tops[cur]) {\n                var bubble_top = find_one_bubble(cur)\n                
if (bubble_top) {\n                    delete to_bubble[cur]\n                    mark_bubble(cur, bubble_top, bubble_tops[cur] || cur)\n                    bubble_tops[bubble_top] = bubble_tops[cur] || cur\n                    bubble_bottoms[bubble_tops[cur] || cur] = bubble_top\n                }\n            }\n\n            Object.keys(L.T[cur]).forEach(f)\n        }\n        Object.keys(L.current_version).forEach(f)\n\n        function find_one_bubble(cur) {\n            var seen = {[cur]: true}\n            var q = Object.keys(L.T[cur])\n            var expecting = Object.fromEntries(q.map(x => [x, true]))\n            while (q.length) {\n                cur = q.pop()\n                if (!L.T[cur]) return null\n                if (keep_us[cur]) return null\n                if (Object.keys(children[cur]).every(c => seen[c])) {\n                    seen[cur] = true\n                    delete expecting[cur]\n                    if (!Object.keys(expecting).length) return cur\n                    \n                    Object.keys(L.T[cur]).forEach(p => {\n                        q.push(p)\n                        expecting[p] = true\n                    })\n                }\n            }\n            return null\n        }\n\n        to_bubble = Object.fromEntries(Object.entries(to_bubble).map(\n            ([v, bub]) => [v, [bub, bubble_bottoms[bub]]]\n        ))\n\n        apply_bubbles(to_bubble)\n    }\n\n    function add_full_ack_leaf(version) {\n        var marks = {}\n        function f(v) {\n            if (!marks[v]) {\n                marks[v] = true\n                delete L.unack_boundary[v]\n                delete L.acked_boundary[v]\n                delete L.acks_in_process[v]\n                Object.keys(L.T[v]).forEach(f)\n            }\n        }\n        f(version)\n\n        L.acked_boundary[version] = true\n        prune(L)\n    }\n\n    function check_ack_count(version) {\n        if (L.acks_in_process[version] && 
L.acks_in_process[version].count == 0) {\n            if (L.acks_in_process[version].origin) {\n                send(L.acks_in_process[version].origin, {cmd: 'ack1', version})\n            } else {\n                add_full_ack_leaf(version)\n                for (let p of Object.keys(L.peers)) send(p, {cmd: 'ack2', version})\n            }\n        }\n    }\n\n    function generate_braid(versions) {\n        var anc = versions && Object.keys(versions).length ? ancestors(versions, true) : {}\n        var is_anc = x => anc[x]\n\n        if (Object.keys(L.T).length === 0) return []\n\n        return Object.entries(L.version_cache).filter(x => !is_anc(x[0])).map(([version, set_message]) => {\n            return L.version_cache[version] = set_message || generate_set_message(version)\n        })\n\n        function generate_set_message(version) {\n            if (!Object.keys(L.T[version]).length) {\n                return {\n                    version,\n                    parents: {},\n                    patches: [` = ${JSON.stringify(L.read(v => v == version))}`]\n                }\n            }\n        \n            var is_lit = x => !x || typeof(x) !== 'object' || x.t === 'lit'\n            var get_lit = x => (x && typeof(x) === 'object' && x.t === 'lit') ? 
x.S : x\n        \n            var ancs = ancestors({[version]: true})\n            delete ancs[version]\n            var is_anc = x => ancs[x]\n            var path = []\n            var patches = []\n            var sort_keys = {}\n            recurse(L.S)\n            function recurse(x) {\n                if (is_lit(x)) {\n                } else if (x.t === 'val') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        if (s[2].length) {\n                            patches.push(`${path.join('')} = ${JSON.stringify(s[2][0])}`)\n                            if (s[3]) sort_keys[patches.length - 1] = s[3]\n                        }\n                    })\n                    traverse_space_dag(x.S, is_anc, node => {\n                        node.elems.forEach(recurse)\n                    })\n                } else if (x.t === 'arr') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`)\n                        if (s[3]) sort_keys[patches.length - 1] = s[3]\n                    })\n                    var i = 0\n                    traverse_space_dag(x.S, is_anc, node => {\n                        node.elems.forEach(e => {\n                            path.push(`[${i++}]`)\n                            recurse(e)\n                            path.pop()\n                        })\n                    })\n                } else if (x.t === 'obj') {\n                    Object.entries(x.S).forEach(e => {\n                        path.push('[' + JSON.stringify(e[0]) + ']')\n                        recurse(e[1])\n                        path.pop()\n                    })\n                } else if (x.t === 'str') {\n                    space_dag_generate_braid(x.S, version, is_anc).forEach(s => {\n                        patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = 
${JSON.stringify(s[2])}`)\n                        if (s[3]) sort_keys[patches.length - 1] = s[3]\n                    })\n                }\n            }\n        \n            return {\n                version,\n                parents: {...L.T[version]},\n                patches,\n                sort_keys\n            }\n        }\n    }\n\n    function apply_bubbles(to_bubble) {\n        function recurse(x) {\n            if (is_lit(x)) return x\n            if (x.t == 'val') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                traverse_space_dag(x.S, () => true, node => {\n                    node.elems = node.elems.slice(0, 1).map(recurse)\n                }, true)\n                if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.length == 1 && is_lit(x.S.elems[0])) return x.S.elems[0]\n                return x\n            }\n            if (x.t == 'arr') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                traverse_space_dag(x.S, () => true, node => {\n                    node.elems = node.elems.map(recurse)\n                }, true)\n                if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.every(is_lit) && !Object.keys(x.S.deleted_by).length) return {t: 'lit', S: x.S.elems.map(get_lit)}\n                return x\n            }\n            if (x.t == 'obj') {\n                Object.entries(x.S).forEach(e => {\n                    var y = x.S[e[0]] = recurse(e[1])\n                    if (y == null) delete x.S[e[0]]\n                })\n                if (Object.values(x.S).every(is_lit)) {\n                    var o = {}\n                    Object.entries(x.S).forEach(e => o[e[0]] = get_lit(e[1]))\n                    return {t: 'lit', S: o}\n                }\n                return x\n            }\n            if (x.t == 'str') {\n                space_dag_apply_bubbles(x.S, to_bubble)\n                if (x.S.nexts.length == 0 && !x.S.next && !Object.keys(x.S.deleted_by).length) return 
x.S.elems\n                return x\n            }\n        }\n        L.S = recurse(L.S)\n\n        Object.entries(to_bubble).forEach(([version, bubble]) => {\n            if (version === bubble[1])\n                L.T[bubble[0]] = L.T[bubble[1]]\n            if (version !== bubble[0]) {\n                delete L.T[version]\n                delete L.version_cache[version]\n            } else L.version_cache[version] = null\n        })\n\n        var leaves = Object.keys(L.current_version)\n        var acked_boundary = Object.keys(L.acked_boundary)\n        var fiss = Object.keys(L.fissures)\n        if (leaves.length == 1 && acked_boundary.length == 1\n            && leaves[0] == acked_boundary[0] && fiss.length == 0) {\n            L.T = { [leaves[0]]: {} }\n            L.S = make_lit(L.read())\n        }\n    }\n\n    function add_version(version, parents, patches, sort_keys, is_anc) {\n        if (L.T[version]) return\n\n        L.T[version] = {...parents}\n\n        L.version_cache[version] = JSON.parse(JSON.stringify({\n            version, parents, patches, sort_keys\n        }))\n\n        Object.keys(parents).forEach(k => {\n            if (L.current_version[k])\n                delete L.current_version[k]\n        })\n        L.current_version[version] = true\n        \n        if (!sort_keys) sort_keys = {}\n        \n        if (!Object.keys(parents).length) {\n            var parse = parse_patch(patches[0])\n            L.S = make_lit(parse.value)\n            return\n        }\n        \n        if (!is_anc) {\n            if (parents == L.current_version) {\n                is_anc = _version => _version != version\n            } else {\n                var ancs = ancestors(parents)\n                is_anc = _version => ancs[_version]\n            }\n        }\n        \n        return patches.map((patch, i) => {\n            var sort_key = sort_keys[i]\n            var parse = parse_patch(patch)\n            var cur = resolve_path(parse)\n           
 if (!parse.slice) {\n                if (cur.t != 'val') throw 'bad'\n                var len = space_dag_length(cur.S, is_anc)\n                space_dag_add_version(cur.S, version, [[0, len, [parse.delete ? null : make_lit(parse.value)]]], sort_key, is_anc)\n            } else {\n                if (typeof parse.value === 'string' && cur.t !== 'str')\n                    throw `Cannot splice string ${JSON.stringify(parse.value)} into non-string`\n                if (parse.value instanceof Array && cur.t !== 'arr')\n                    throw `Cannot splice array ${JSON.stringify(parse.value)} into non-array`\n                if (parse.value instanceof Array)\n                    parse.value = parse.value.map(x => make_lit(x))\n\n                var r0 = parse.slice[0]\n                var r1 = parse.slice[1]\n                if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) {\n                    let len = space_dag_length(cur.S, is_anc)\n                    if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0\n                    if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1\n                }\n\n                return space_dag_add_version(cur.S, version, [[r0, r1 - r0, parse.value]], sort_key, is_anc)\n            }\n        })\n\n        function resolve_path(parse) {\n            var cur = L.S\n            if (!cur || typeof(cur) != 'object' || cur.t == 'lit')\n                cur = L.S = {t: 'val', S: create_space_dag_node(null, [cur])}\n            var prev_S = null\n            var prev_i = 0\n            for (var i=0; i<parse.path.length; i++) {\n                var key = parse.path[i]\n                if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n                if (cur.t == 'lit') {\n                    var new_cur = {}\n                    if (cur.S instanceof Array) {\n                        new_cur.t = 'arr'\n                        new_cur.S = create_space_dag_node(null, cur.S.map(x => make_lit(x)))\n        
            } else {\n                        if (typeof(cur.S) != 'object') throw 'bad'\n                        new_cur.t = 'obj'\n                        new_cur.S = {}\n                        Object.entries(cur.S).forEach(e => new_cur.S[e[0]] = make_lit(e[1]))\n                    }\n                    cur = new_cur\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                }\n                if (cur.t == 'obj') {\n                    let x = cur.S[key]\n                    if (!x || typeof(x) != 'object' || x.t == 'lit')\n                        x = cur.S[key] = {t: 'val', S: create_space_dag_node(null, [x == null ? null : x])}\n                    cur = x\n                } else if (i == parse.path.length - 1 && !parse.slice) {\n                    parse.slice = [key, key + 1]\n                    parse.value = (cur.t == 'str') ? parse.value : [parse.value]\n                } else if (cur.t == 'arr') {\n                    cur = space_dag_get(prev_S = cur.S, prev_i = key, is_anc)\n                } else throw 'bad'\n            }\n            if (parse.slice) {\n                if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc)\n                if (typeof(cur) == 'string') {\n                    cur = {t: 'str', S: create_space_dag_node(null, cur)}\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                } else if (cur.t == 'lit') {\n                    if (!(cur.S instanceof Array)) throw 'bad'\n                    cur = {t: 'arr', S: create_space_dag_node(null, cur.S.map(x => make_lit(x)))}\n                    space_dag_set(prev_S, prev_i, cur, is_anc)\n                }\n            }\n            return cur\n        }\n    }\n\n    function ancestors(versions, ignore_nonexistent) {\n        var result = {}\n        function recurse(version) {\n            if (result[version]) return\n            if (!L.T[version]) {\n                if (ignore_nonexistent) return\n                
throw `The version ${version} no existo`\n            }\n            result[version] = true\n            Object.keys(L.T[version]).forEach(recurse)\n        }\n        Object.keys(versions).forEach(recurse)\n        return result\n    }\n\n    function get_leaves(versions) {\n        var leaves = {...versions}\n        Object.keys(versions).forEach(v => {\n            Object.keys(L.T[v]).forEach(p => delete leaves[p])\n        })\n        return leaves\n    }\n\n    return L\n}\n\nfunction create_space_dag_node(version, elems, end_cap, sort_key) {\n    return {\n        version : version,\n        sort_key : sort_key,\n        elems : elems,\n        deleted_by : {},\n        end_cap : end_cap,\n        nexts : [],\n        next : null\n    }\n}\n\nfunction space_dag_generate_braid(S, version, is_anc) {\n    var splices = []\n\n    function add_ins(offset, ins, sort_key, end_cap) {\n        if (typeof(ins) !== 'string')\n            ins = ins.map(x => read_raw(x, () => false))\n        if (splices.length > 0) {\n            var prev = splices[splices.length - 1]\n            if (prev[0] + prev[1] === offset && !end_cap && (prev[4] === 'i' || (prev[4] === 'r' && prev[1] === 0))) {\n                prev[2] = prev[2].concat(ins)\n                return\n            }\n        }\n        splices.push([offset, 0, ins, sort_key, end_cap ? 
'r' : 'i'])\n    }\n\n    function add_del(offset, del, ins) {\n        if (splices.length > 0) {\n            var prev = splices[splices.length - 1]\n            if (prev[0] + prev[1] === offset && prev[4] !== 'i') {\n                prev[1] += del\n                return\n            }\n        }\n        splices.push([offset, del, ins, null, 'd'])\n    }\n    \n    var offset = 0\n    function helper(node, _version, end_cap) {\n        if (_version === version) {\n            add_ins(offset, node.elems.slice(0), node.sort_key, end_cap)\n        } else if (node.deleted_by[version] && node.elems.length > 0) {\n            add_del(offset, node.elems.length, node.elems.slice(0, 0))\n        }\n        \n        if ((!_version || is_anc(_version)) && !Object.keys(node.deleted_by).some(is_anc)) {\n            offset += node.elems.length\n        }\n        \n        node.nexts.forEach(next => helper(next, next.version, node.end_cap))\n        if (node.next) helper(node.next, _version)\n    }\n    helper(S, null)\n    splices.forEach(s => {\n        // if we have replaces with 0 deletes,\n        // make them have at least 1 delete..\n        // this can happen when there are multiple replaces of the same text,\n        // and our code above will associate those deletes with only one of them\n        if (s[4] === 'r' && s[1] === 0) s[1] = 1\n    })\n    return splices\n}\n\nfunction space_dag_apply_bubbles(S, to_bubble) {\n\n    traverse_space_dag(S, () => true, node => {\n        if (to_bubble[node.version] && to_bubble[node.version][0] != node.version) {\n            if (!node.sort_key) node.sort_key = node.version\n            node.version = to_bubble[node.version][0]\n        }\n\n        for (var x of Object.keys(node.deleted_by)) {\n            if (to_bubble[x]) {\n                delete node.deleted_by[x]\n                node.deleted_by[to_bubble[x][0]] = true\n            }\n        }\n    }, true)\n\n    function set_nnnext(node, next) {\n        while 
(node.next) node = node.next\n        node.next = next\n    }\n\n    do_line(S, S.version)\n    function do_line(node, version) {\n        var prev = null\n        while (node) {\n            if (node.nexts[0] && node.nexts[0].version == version) {\n                for (let i = 0; i < node.nexts.length; i++) {\n                    delete node.nexts[i].version\n                    delete node.nexts[i].sort_key\n                    set_nnnext(node.nexts[i], i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next)\n                }\n                node.next = node.nexts[0]\n                node.nexts = []\n            }\n\n            if (node.deleted_by[version]) {\n                node.elems = node.elems.slice(0, 0)\n                node.deleted_by = {}\n                if (prev) { node = prev; continue }\n            }\n\n            var next = node.next\n\n            if (!node.nexts.length && next && (!node.elems.length || !next.elems.length || (Object.keys(node.deleted_by).every(x => next.deleted_by[x]) && Object.keys(next.deleted_by).every(x => node.deleted_by[x])))) {\n                if (!node.elems.length) node.deleted_by = next.deleted_by\n                node.elems = node.elems.concat(next.elems)\n                node.end_cap = next.end_cap\n                node.nexts = next.nexts\n                node.next = next.next\n                continue\n            }\n\n            for (let n of node.nexts) do_line(n, n.version)\n\n            prev = node\n            node = next\n        }\n    }\n}\n\nfunction space_dag_get(S, i, is_anc) {\n    var ret = null\n    var offset = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            ret = node.elems[i - offset]\n            return false\n        }\n        offset += node.elems.length\n    })\n    return ret\n}\n\nfunction space_dag_set(S, i, v, is_anc) {\n    var offset = 0\n    traverse_space_dag(S, is_anc ? 
is_anc : () => true, (node) => {\n        if (i - offset < node.elems.length) {\n            node.elems[i - offset] = v\n            return false\n        }\n        offset += node.elems.length\n    })\n}\n\nfunction space_dag_length(S, is_anc) {\n    var count = 0\n    traverse_space_dag(S, is_anc ? is_anc : () => true, node => {\n        count += node.elems.length\n    })\n    return count\n}\n\nfunction space_dag_break_node(node, x, end_cap, new_next) {\n    var tail = create_space_dag_node(null, node.elems.slice(x), node.end_cap)\n    Object.assign(tail.deleted_by, node.deleted_by)\n    tail.nexts = node.nexts\n    tail.next = node.next\n    \n    node.elems = node.elems.slice(0, x)\n    node.end_cap = end_cap\n    node.nexts = new_next ? [new_next] : []\n    node.next = tail\n\n    return tail\n}\n\nfunction space_dag_add_version(S, version, splices, sort_key, is_anc) {\n\n    var rebased_splices = []\n    \n    function add_to_nexts(nexts, to) {\n        var i = binarySearch(nexts, function (x) {\n            if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1\n            if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1\n            return 0\n        })\n        nexts.splice(i, 0, to)\n    }\n    \n    var si = 0\n    var delete_up_to = 0\n    \n    var process_patch = (node, offset, has_nexts, prev, _version, deleted) => {\n        var s = splices[si]\n        if (!s) return false\n        \n        if (deleted) {\n            if (s[1] == 0 && s[0] == offset) {\n                if (node.elems.length == 0 && !node.end_cap && has_nexts) return\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n\n                rebased_splices.push([rebase_offset, 0, s[2]])\n\n                if (node.elems.length == 0 && !node.end_cap)\n                    add_to_nexts(node.nexts, new_node)\n                else\n                    space_dag_break_node(node, 0, undefined, new_node)\n                
si++\n            }\n            return            \n        }\n        \n        if (s[1] == 0) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d > 0) return\n            if (d == 0 && !node.end_cap && has_nexts) return\n            var new_node = create_space_dag_node(version, s[2], null, sort_key)\n\n            rebased_splices.push([rebase_offset + s[0] - offset, 0, s[2]])\n\n            if (d == 0 && !node.end_cap) {\n                add_to_nexts(node.nexts, new_node)\n            } else {\n                space_dag_break_node(node, s[0] - offset, undefined, new_node)\n            }\n            si++\n            return\n        }\n        \n        if (delete_up_to <= offset) {\n            var d = s[0] - (offset + node.elems.length)\n            if (d >= 0) return\n            delete_up_to = s[0] + s[1]\n            \n            if (s[2]) {\n                var new_node = create_space_dag_node(version, s[2], null, sort_key)\n\n                rebased_splices.push([rebase_offset + s[0] - offset, 0, s[2]])\n\n                if (s[0] == offset && prev && prev.end_cap) {\n                    add_to_nexts(prev.nexts, new_node)\n                } else {\n                    space_dag_break_node(node, s[0] - offset, true, new_node)\n                    return\n                }\n            } else {\n                if (s[0] == offset) {\n                } else {\n                    space_dag_break_node(node, s[0] - offset)\n                    return\n                }\n            }\n        }\n        \n        if (delete_up_to > offset) {\n            if (delete_up_to <= offset + node.elems.length) {\n                if (delete_up_to < offset + node.elems.length) {\n                    space_dag_break_node(node, delete_up_to - offset)\n                }\n                si++\n            }\n            node.deleted_by[version] = true\n\n            rebased_splices.push([rebase_offset, node.elems.length, ''])\n\n            
return\n        }\n    }\n    \n    var f = is_anc\n    var exit_early = {}\n    var offset = 0\n    var rebase_offset = 0\n    function traverse(node, prev, version) {\n        var rebase_deleted = Object.keys(node.deleted_by).length > 0\n        if (!version || f(version)) {\n            var has_nexts = node.nexts.find(next => f(next.version))\n            var deleted = Object.keys(node.deleted_by).some(version => f(version))\n            if (process_patch(node, offset, has_nexts, prev, version, deleted) == false) throw exit_early\n            if (!deleted) offset += node.elems.length\n        }\n        if (!rebase_deleted) rebase_offset += node.elems.length\n\n        for (var next of node.nexts) traverse(next, null, next.version)\n        if (node.next) traverse(node.next, node, version)\n    }\n    try {\n        traverse(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n\n    return rebased_splices\n}\n\nfunction traverse_space_dag(S, f, cb, view_deleted, tail_cb) {\n    var exit_early = {}\n    var offset = 0\n    function helper(node, prev, version) {\n        var has_nexts = node.nexts.find(next => f(next.version))\n        var deleted = Object.keys(node.deleted_by).some(version => f(version))\n        if (view_deleted || !deleted) {\n            if (cb(node, offset, has_nexts, prev, version, deleted) == false)\n                throw exit_early\n            offset += node.elems.length\n        }\n        for (var next of node.nexts)\n            if (f(next.version)) helper(next, null, next.version)\n        if (node.next) helper(node.next, node, version)\n        else if (tail_cb) tail_cb(node)\n    }\n    try {\n        helper(S, null, S.version)\n    } catch (e) {\n        if (e != exit_early) throw e\n    }\n}\n\nfunction parse_patch(patch) {\n    var ret = { path : [] }\n    var re = /^(delete)\\s+|\\.?([^\\.\\[ =]+)|\\[((\\-?\\d+)(:\\-?\\d+)?|'(\\\\'|[^'])*'|\"(\\\\\"|[^\"])*\")\\]|\\s*=\\s*([\\s\\S]*)/g\n    var 
m\n    while (m = re.exec(patch)) {\n        if (m[1]) ret.delete = true\n        else if (m[2]) ret.path.push(m[2])\n        else if (m[3] && m[5]) ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))]\n        else if (m[3]) ret.path.push(JSON.parse(m[3]))\n        else if (m[8]) ret.value = JSON.parse(m[8])\n    }\n    return ret\n}\n\n// modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript\nfunction binarySearch(ar, compare_fn) {\n    var m = 0;\n    var n = ar.length - 1;\n    while (m <= n) {\n        var k = (n + m) >> 1;\n        var cmp = compare_fn(ar[k]);\n        if (cmp > 0) {\n            m = k + 1;\n        } else if(cmp < 0) {\n            n = k - 1;\n        } else {\n            return k;\n        }\n    }\n    return m;\n}\n"
  }
]