Repository: giscafer/mapshaper-plus
Branch: master
Commit: 49981ceefa0a
Files: 14
Total size: 1.1 MB

Directory structure:
gitextract_hdn3tnup/
├── README.md
├── codecs.js
├── deflate.js
├── elements.css
├── encode.js
├── index.html
├── manifest.js
├── mapshaper-gui.js
├── mapshaper.js
├── page.css
├── pako.deflate.js
├── pako.inflate.js
├── z-worker.js
└── zip.js

================================================
FILE CONTENTS
================================================

================================================
FILE: README.md
================================================

# mapshaper-plus

Generate geojson files for [Apache ECharts (incubating)](https://github.com/apache/incubator-echarts) Map, based on mapshaper

基于[mapshaper](https://github.com/mbloch/mapshaper)对geojson地图数据的坐标信息进行压缩编码,并提供可直接生成压缩编码后的echarts map数据格式

通过`mapshaper-plus`可以直接将`shp`格式数据转换为压缩后的echarts数据

## Demo

https://giscafer.github.io/mapshaper-plus/

## Description

**介绍** ——[mapshaper](https://github.com/mbloch/mapshaper)可以将多种数据格式(Shapefile, GeoJSON, TopoJSON 和 Zip files)导入后,对地图的编辑和导出(Shapefile, GeoJSON, TopoJSON, DSV, SVG),功能强大和简单易用。

`mapshaper-plus`是在`mapshaper`基础上拓展对地图坐标信息的压缩编码,很大程度上减小了文件的代码行数和字节大小:譬如一个贵州省的数据,原始的`geojson`数据会在`30M`左右,但在对坐标信息压缩编码后,仅为`1.4M`。

**背景** ——在做echarts图表统计时,需要自制地图数据,但官方没有提供一个平台可以直接将`shp文件`转化为压缩后的`json`或`js`格式的地图文件,而`mapshaper`导出的json数据没有压缩,数据量过大。

使用可以访问[mapshaper-plus在线demo](http://giscafer.github.io/mapshaper-plus/)

## Screenshot

![导出压缩版的数据](https://raw.githubusercontent.com/giscafer/mapshaper-plus/master/images/echarts01.png)

## License

mapshaper is licensed under MPL 2.0. and mapshaper-plus is licensed under MIT.
> Blog [giscafer.com](http://giscafer.com)  ·  > GitHub [@giscafer](https://github.com/giscafer)  ·  > Weibo [@Nickbing Lao](https://weibo.com/laohoubin) ================================================ FILE: codecs.js ================================================ /// wrapper for pako (https://github.com/nodeca/pako) /* globals pako */ (function(global) { "use strict"; function Codec(isDeflater, options) { var newOptions = { raw: true, chunkSize: 1024 * 1024 }; if (options && typeof options.level === 'number') newOptions.level = options.level; this._backEnd = isDeflater? new pako.Deflate(newOptions) : new pako.Inflate(newOptions); this._chunks = []; this._dataLength = 0; this._backEnd.onData = this._onData.bind(this); } Codec.prototype._onData = function _onData(chunk) { this._chunks.push(chunk); this._dataLength += chunk.length; }; Codec.prototype._fetchData = function _fetchData() { var be = this._backEnd; if (be.err !== 0) throw new Error(be.msg); var chunks = this._chunks; var data; if (chunks.length === 1) data = chunks[0]; else if (chunks.length > 1) { data = new Uint8Array(this._dataLength); for (var i = 0, n = chunks.length, off = 0; i < n; i++) { var chunk = chunks[i]; data.set(chunk, off); off += chunk.length; } } chunks.length = 0; this._dataLength = 0; return data; }; Codec.prototype.append = function append(bytes, onprogress) { this._backEnd.push(bytes, false); return this._fetchData(); }; Codec.prototype.flush = function flush() { this._backEnd.push(new Uint8Array(0), true); return this._fetchData(); }; function Deflater(options) { Codec.call(this, true, options); } Deflater.prototype = Object.create(Codec.prototype); function Inflater() { Codec.call(this, false); } Inflater.prototype = Object.create(Codec.prototype); // 'zip' may not be defined in z-worker and some tests var env = global.zip || global; env.Deflater = env._pako_Deflater = Deflater; env.Inflater = env._pako_Inflater = Inflater; })(this); 
================================================
FILE: deflate.js
================================================

/*
 Copyright (c) 2013 Gildas Lormeau. All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:

 1. Redistributions of source code must retain the above copyright notice,
 this list of conditions and the following disclaimer.

 2. Redistributions in binary form must reproduce the above copyright
 notice, this list of conditions and the following disclaimer in
 the documentation and/or other materials provided with the distribution.

 3. The names of the authors may not be used to endorse or promote products
 derived from this software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
 INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
 INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
 OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * This program is based on JZlib 1.0.2 ymnk, JCraft,Inc.
 * JZlib is based on zlib-1.1.3, so all credit should go to authors
 * Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
 * and contributors of zlib.
*/ (function(global) { "use strict"; // Global var MAX_BITS = 15; var D_CODES = 30; var BL_CODES = 19; var LENGTH_CODES = 29; var LITERALS = 256; var L_CODES = (LITERALS + 1 + LENGTH_CODES); var HEAP_SIZE = (2 * L_CODES + 1); var END_BLOCK = 256; // Bit length codes must not exceed MAX_BL_BITS bits var MAX_BL_BITS = 7; // repeat previous bit length 3-6 times (2 bits of repeat count) var REP_3_6 = 16; // repeat a zero length 3-10 times (3 bits of repeat count) var REPZ_3_10 = 17; // repeat a zero length 11-138 times (7 bits of repeat count) var REPZ_11_138 = 18; // The lengths of the bit length codes are sent in order of decreasing // probability, to avoid transmitting the lengths for unused bit // length codes. var Buf_size = 8 * 2; // JZlib version : "1.0.2" var Z_DEFAULT_COMPRESSION = -1; // compression strategy var Z_FILTERED = 1; var Z_HUFFMAN_ONLY = 2; var Z_DEFAULT_STRATEGY = 0; var Z_NO_FLUSH = 0; var Z_PARTIAL_FLUSH = 1; var Z_FULL_FLUSH = 3; var Z_FINISH = 4; var Z_OK = 0; var Z_STREAM_END = 1; var Z_NEED_DICT = 2; var Z_STREAM_ERROR = -2; var Z_DATA_ERROR = -3; var Z_BUF_ERROR = -5; // Tree // see definition of array dist_code below var _dist_code = [ 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17, 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29 ]; function Tree() { var that = this; // dyn_tree; // the dynamic tree // max_code; // largest code with non zero frequency // stat_desc; // the corresponding static tree // Compute the optimal bit lengths for a tree and update the total bit // length // for the current block. // IN assertion: the fields freq and dad are set, heap[heap_max] and // above are the tree nodes sorted by increasing frequency. // OUT assertions: the field len is set to the optimal bit length, the // array bl_count contains the frequencies for each bit length. // The length opt_len is updated; static_len is also updated if stree is // not null. 
function gen_bitlen(s) { var tree = that.dyn_tree; var stree = that.stat_desc.static_tree; var extra = that.stat_desc.extra_bits; var base = that.stat_desc.extra_base; var max_length = that.stat_desc.max_length; var h; // heap index var n, m; // iterate over the tree elements var bits; // bit length var xbits; // extra bits var f; // frequency var overflow = 0; // number of elements with bit length too large for (bits = 0; bits <= MAX_BITS; bits++) s.bl_count[bits] = 0; // In a first pass, compute the optimal bit lengths (which may // overflow in the case of the bit length tree). tree[s.heap[s.heap_max] * 2 + 1] = 0; // root of the heap for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { n = s.heap[h]; bits = tree[tree[n * 2 + 1] * 2 + 1] + 1; if (bits > max_length) { bits = max_length; overflow++; } tree[n * 2 + 1] = bits; // We overwrite tree[n*2+1] which is no longer needed if (n > that.max_code) continue; // not a leaf node s.bl_count[bits]++; xbits = 0; if (n >= base) xbits = extra[n - base]; f = tree[n * 2]; s.opt_len += f * (bits + xbits); if (stree) s.static_len += f * (stree[n * 2 + 1] + xbits); } if (overflow === 0) return; // This happens for example on obj2 and pic of the Calgary corpus // Find the first bit length which could increase: do { bits = max_length - 1; while (s.bl_count[bits] === 0) bits--; s.bl_count[bits]--; // move one leaf down the tree s.bl_count[bits + 1] += 2; // move one overflow item as its brother s.bl_count[max_length]--; // The brother of the overflow item also moves one step up, // but this does not affect bl_count[max_length] overflow -= 2; } while (overflow > 0); for (bits = max_length; bits !== 0; bits--) { n = s.bl_count[bits]; while (n !== 0) { m = s.heap[--h]; if (m > that.max_code) continue; if (tree[m * 2 + 1] != bits) { s.opt_len += (bits - tree[m * 2 + 1]) * tree[m * 2]; tree[m * 2 + 1] = bits; } n--; } } } // Reverse the first len bits of a code, using straightforward code (a // faster // method would use a table) // IN 
assertion: 1 <= len <= 15 function bi_reverse(code, // the value to invert len // its bit length ) { var res = 0; do { res |= code & 1; code >>>= 1; res <<= 1; } while (--len > 0); return res >>> 1; } // Generate the codes for a given tree and bit counts (which need not be // optimal). // IN assertion: the array bl_count contains the bit length statistics for // the given tree and the field len is set for all tree elements. // OUT assertion: the field code is set for all tree elements of non // zero code length. function gen_codes(tree, // the tree to decorate max_code, // largest code with non zero frequency bl_count // number of codes at each bit length ) { var next_code = []; // next code value for each // bit length var code = 0; // running code value var bits; // bit index var n; // code index var len; // The distribution counts are first used to generate the code values // without bit reversal. for (bits = 1; bits <= MAX_BITS; bits++) { next_code[bits] = code = ((code + bl_count[bits - 1]) << 1); } // Check that the bit counts in bl_count are consistent. The last code // must be all ones. // Assert (code + bl_count[MAX_BITS]-1 == (1<= 1; n--) s.pqdownheap(tree, n); // Construct the Huffman tree by repeatedly combining the least two // frequent nodes. node = elems; // next internal node of the tree do { // n = node of least frequency n = s.heap[1]; s.heap[1] = s.heap[s.heap_len--]; s.pqdownheap(tree, 1); m = s.heap[1]; // m = node of next least frequency s.heap[--s.heap_max] = n; // keep the nodes sorted by frequency s.heap[--s.heap_max] = m; // Create a new node father of n and m tree[node * 2] = (tree[n * 2] + tree[m * 2]); s.depth[node] = Math.max(s.depth[n], s.depth[m]) + 1; tree[n * 2 + 1] = tree[m * 2 + 1] = node; // and insert the new node in the heap s.heap[1] = node++; s.pqdownheap(tree, 1); } while (s.heap_len >= 2); s.heap[--s.heap_max] = s.heap[1]; // At this point, the fields freq and dad are set. We can now // generate the bit lengths. 
gen_bitlen(s); // The field len is now set, we can generate the bit codes gen_codes(tree, that.max_code, s.bl_count); }; } Tree._length_code = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28 ]; Tree.base_length = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 0 ]; Tree.base_dist = [ 0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576 ]; // Mapping from a distance to a distance code. dist is the distance - 1 and // must not have side effects. _dist_code[256] and _dist_code[257] are never // used. Tree.d_code = function(dist) { return ((dist) < 256 ? 
_dist_code[dist] : _dist_code[256 + ((dist) >>> 7)]); }; // extra bits for each length code Tree.extra_lbits = [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0 ]; // extra bits for each distance code Tree.extra_dbits = [ 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13 ]; // extra bits for each bit length code Tree.extra_blbits = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 ]; Tree.bl_order = [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; // StaticTree function StaticTree(static_tree, extra_bits, extra_base, elems, max_length) { var that = this; that.static_tree = static_tree; that.extra_bits = extra_bits; that.extra_base = extra_base; that.elems = elems; that.max_length = max_length; } StaticTree.static_ltree = [ 12, 8, 140, 8, 76, 8, 204, 8, 44, 8, 172, 8, 108, 8, 236, 8, 28, 8, 156, 8, 92, 8, 220, 8, 60, 8, 188, 8, 124, 8, 252, 8, 2, 8, 130, 8, 66, 8, 194, 8, 34, 8, 162, 8, 98, 8, 226, 8, 18, 8, 146, 8, 82, 8, 210, 8, 50, 8, 178, 8, 114, 8, 242, 8, 10, 8, 138, 8, 74, 8, 202, 8, 42, 8, 170, 8, 106, 8, 234, 8, 26, 8, 154, 8, 90, 8, 218, 8, 58, 8, 186, 8, 122, 8, 250, 8, 6, 8, 134, 8, 70, 8, 198, 8, 38, 8, 166, 8, 102, 8, 230, 8, 22, 8, 150, 8, 86, 8, 214, 8, 54, 8, 182, 8, 118, 8, 246, 8, 14, 8, 142, 8, 78, 8, 206, 8, 46, 8, 174, 8, 110, 8, 238, 8, 30, 8, 158, 8, 94, 8, 222, 8, 62, 8, 190, 8, 126, 8, 254, 8, 1, 8, 129, 8, 65, 8, 193, 8, 33, 8, 161, 8, 97, 8, 225, 8, 17, 8, 145, 8, 81, 8, 209, 8, 49, 8, 177, 8, 113, 8, 241, 8, 9, 8, 137, 8, 73, 8, 201, 8, 41, 8, 169, 8, 105, 8, 233, 8, 25, 8, 153, 8, 89, 8, 217, 8, 57, 8, 185, 8, 121, 8, 249, 8, 5, 8, 133, 8, 69, 8, 197, 8, 37, 8, 165, 8, 101, 8, 229, 8, 21, 8, 149, 8, 85, 8, 213, 8, 53, 8, 181, 8, 117, 8, 245, 8, 13, 8, 141, 8, 77, 8, 205, 8, 45, 8, 173, 8, 109, 8, 237, 8, 29, 8, 157, 8, 93, 8, 221, 8, 61, 8, 189, 8, 125, 8, 253, 8, 19, 9, 275, 9, 147, 9, 403, 9, 83, 9, 339, 9, 211, 9, 467, 9, 
51, 9, 307, 9, 179, 9, 435, 9, 115, 9, 371, 9, 243, 9, 499, 9, 11, 9, 267, 9, 139, 9, 395, 9, 75, 9, 331, 9, 203, 9, 459, 9, 43, 9, 299, 9, 171, 9, 427, 9, 107, 9, 363, 9, 235, 9, 491, 9, 27, 9, 283, 9, 155, 9, 411, 9, 91, 9, 347, 9, 219, 9, 475, 9, 59, 9, 315, 9, 187, 9, 443, 9, 123, 9, 379, 9, 251, 9, 507, 9, 7, 9, 263, 9, 135, 9, 391, 9, 71, 9, 327, 9, 199, 9, 455, 9, 39, 9, 295, 9, 167, 9, 423, 9, 103, 9, 359, 9, 231, 9, 487, 9, 23, 9, 279, 9, 151, 9, 407, 9, 87, 9, 343, 9, 215, 9, 471, 9, 55, 9, 311, 9, 183, 9, 439, 9, 119, 9, 375, 9, 247, 9, 503, 9, 15, 9, 271, 9, 143, 9, 399, 9, 79, 9, 335, 9, 207, 9, 463, 9, 47, 9, 303, 9, 175, 9, 431, 9, 111, 9, 367, 9, 239, 9, 495, 9, 31, 9, 287, 9, 159, 9, 415, 9, 95, 9, 351, 9, 223, 9, 479, 9, 63, 9, 319, 9, 191, 9, 447, 9, 127, 9, 383, 9, 255, 9, 511, 9, 0, 7, 64, 7, 32, 7, 96, 7, 16, 7, 80, 7, 48, 7, 112, 7, 8, 7, 72, 7, 40, 7, 104, 7, 24, 7, 88, 7, 56, 7, 120, 7, 4, 7, 68, 7, 36, 7, 100, 7, 20, 7, 84, 7, 52, 7, 116, 7, 3, 8, 131, 8, 67, 8, 195, 8, 35, 8, 163, 8, 99, 8, 227, 8 ]; StaticTree.static_dtree = [ 0, 5, 16, 5, 8, 5, 24, 5, 4, 5, 20, 5, 12, 5, 28, 5, 2, 5, 18, 5, 10, 5, 26, 5, 6, 5, 22, 5, 14, 5, 30, 5, 1, 5, 17, 5, 9, 5, 25, 5, 5, 5, 21, 5, 13, 5, 29, 5, 3, 5, 19, 5, 11, 5, 27, 5, 7, 5, 23, 5 ]; StaticTree.static_l_desc = new StaticTree(StaticTree.static_ltree, Tree.extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); StaticTree.static_d_desc = new StaticTree(StaticTree.static_dtree, Tree.extra_dbits, 0, D_CODES, MAX_BITS); StaticTree.static_bl_desc = new StaticTree(null, Tree.extra_blbits, 0, BL_CODES, MAX_BL_BITS); // Deflate var MAX_MEM_LEVEL = 9; var DEF_MEM_LEVEL = 8; function Config(good_length, max_lazy, nice_length, max_chain, func) { var that = this; that.good_length = good_length; that.max_lazy = max_lazy; that.nice_length = nice_length; that.max_chain = max_chain; that.func = func; } var STORED = 0; var FAST = 1; var SLOW = 2; var config_table = [ new Config(0, 0, 0, 0, STORED), new Config(4, 4, 8, 4, 
FAST), new Config(4, 5, 16, 8, FAST), new Config(4, 6, 32, 32, FAST), new Config(4, 4, 16, 16, SLOW), new Config(8, 16, 32, 32, SLOW), new Config(8, 16, 128, 128, SLOW), new Config(8, 32, 128, 256, SLOW), new Config(32, 128, 258, 1024, SLOW), new Config(32, 258, 258, 4096, SLOW) ]; var z_errmsg = [ "need dictionary", // Z_NEED_DICT // 2 "stream end", // Z_STREAM_END 1 "", // Z_OK 0 "", // Z_ERRNO (-1) "stream error", // Z_STREAM_ERROR (-2) "data error", // Z_DATA_ERROR (-3) "", // Z_MEM_ERROR (-4) "buffer error", // Z_BUF_ERROR (-5) "",// Z_VERSION_ERROR (-6) "" ]; // block not completed, need more input or more output var NeedMore = 0; // block flush performed var BlockDone = 1; // finish started, need only more output at next deflate var FinishStarted = 2; // finish done, accept no more input or output var FinishDone = 3; // preset dictionary flag in zlib header var PRESET_DICT = 0x20; var INIT_STATE = 42; var BUSY_STATE = 113; var FINISH_STATE = 666; // The deflate compression method var Z_DEFLATED = 8; var STORED_BLOCK = 0; var STATIC_TREES = 1; var DYN_TREES = 2; var MIN_MATCH = 3; var MAX_MATCH = 258; var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); function smaller(tree, n, m, depth) { var tn2 = tree[n * 2]; var tm2 = tree[m * 2]; return (tn2 < tm2 || (tn2 == tm2 && depth[n] <= depth[m])); } function Deflate() { var that = this; var strm; // pointer back to this zlib stream var status; // as the name implies // pending_buf; // output still pending var pending_buf_size; // size of pending_buf // pending_out; // next pending byte to output to the stream // pending; // nb of bytes in the pending buffer var method; // STORED (for zip only) or DEFLATED var last_flush; // value of flush param for previous deflate call var w_size; // LZ77 window size (32K by default) var w_bits; // log2(w_size) (8..16) var w_mask; // w_size - 1 var window; // Sliding window. 
Input bytes are read into the second half of the window, // and move to the first half later to keep a dictionary of at least wSize // bytes. With this organization, matches are limited to a distance of // wSize-MAX_MATCH bytes, but this ensures that IO is always // performed with a length multiple of the block size. Also, it limits // the window size to 64K, which is quite useful on MSDOS. // To do: use the user input buffer as sliding window. var window_size; // Actual size of window: 2*wSize, except when the user input buffer // is directly used as sliding window. var prev; // Link to older string with same hash index. To limit the size of this // array to 64K, this link is maintained only for the last 32K strings. // An index in this array is thus a window index modulo 32K. var head; // Heads of the hash chains or NIL. var ins_h; // hash index of string to be inserted var hash_size; // number of elements in hash table var hash_bits; // log2(hash_size) var hash_mask; // hash_size-1 // Number of bits by which ins_h must be shifted at each input // step. It must be such that after MIN_MATCH steps, the oldest // byte no longer takes part in the hash key, that is: // hash_shift * MIN_MATCH >= hash_bits var hash_shift; // Window position at the beginning of the current output block. Gets // negative when the window is moved backwards. var block_start; var match_length; // length of best match var prev_match; // previous match var match_available; // set if previous match exists var strstart; // start of string to insert var match_start; // start of matching string var lookahead; // number of valid bytes ahead in window // Length of the best match at previous step. Matches not greater than this // are discarded. This is used in the lazy match evaluation. var prev_length; // To speed up deflation, hash chains are never searched beyond this // length. A higher limit improves compression ratio but degrades the speed. 
var max_chain_length; // Attempt to find a better match only when the current match is strictly // smaller than this value. This mechanism is used only for compression // levels >= 4. var max_lazy_match; // Insert new strings in the hash table only if the match length is not // greater than this length. This saves time but degrades compression. // max_insert_length is used only for compression levels <= 3. var level; // compression level (1..9) var strategy; // favor or force Huffman coding // Use a faster search when the previous match is longer than this var good_match; // Stop searching when current match exceeds this var nice_match; var dyn_ltree; // literal and length tree var dyn_dtree; // distance tree var bl_tree; // Huffman tree for bit lengths var l_desc = new Tree(); // desc for literal tree var d_desc = new Tree(); // desc for distance tree var bl_desc = new Tree(); // desc for bit length tree // that.heap_len; // number of elements in the heap // that.heap_max; // element of largest frequency // The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. // The same heap array is used to build all trees. // Depth of each subtree used as tie breaker for trees of equal frequency that.depth = []; var l_buf; // index for literals or lengths */ // Size of match buffer for literals/lengths. There are 4 reasons for // limiting lit_bufsize to 64K: // - frequencies can be kept in 16 bit counters // - if compression is not successful for the first block, all input // data is still in the window so we can still emit a stored block even // when input comes from standard input. (This can also be done for // all blocks if lit_bufsize is not greater than 32K.) // - if compression is not successful for a file smaller than 64K, we can // even emit a stored file instead of a stored block (saving 5 bytes). // This is applicable only for zip (not gzip or zlib). 
// - creating new Huffman trees less frequently may not provide fast // adaptation to changes in the input data statistics. (Take for // example a binary file with poorly compressible code followed by // a highly compressible string table.) Smaller buffer sizes give // fast adaptation but have of course the overhead of transmitting // trees more frequently. // - I can't count above 4 var lit_bufsize; var last_lit; // running index in l_buf // Buffer for distances. To simplify the code, d_buf and l_buf have // the same number of elements. To use different lengths, an extra flag // array would be necessary. var d_buf; // index of pendig_buf // that.opt_len; // bit length of current block with optimal trees // that.static_len; // bit length of current block with static trees var matches; // number of string matches in current block var last_eob_len; // bit length of EOB code for last block // Output buffer. bits are inserted starting at the bottom (least // significant bits). var bi_buf; // Number of valid bits in bi_buf. All bits above the last valid bit // are always zero. var bi_valid; // number of codes at each bit length for an optimal tree that.bl_count = []; // heap used to build the Huffman trees that.heap = []; dyn_ltree = []; dyn_dtree = []; bl_tree = []; function lm_init() { var i; window_size = 2 * w_size; head[hash_size - 1] = 0; for (i = 0; i < hash_size - 1; i++) { head[i] = 0; } // Set the default configuration parameters: max_lazy_match = config_table[level].max_lazy; good_match = config_table[level].good_length; nice_match = config_table[level].nice_length; max_chain_length = config_table[level].max_chain; strstart = 0; block_start = 0; lookahead = 0; match_length = prev_length = MIN_MATCH - 1; match_available = 0; ins_h = 0; } function init_block() { var i; // Initialize the trees. 
for (i = 0; i < L_CODES; i++) dyn_ltree[i * 2] = 0; for (i = 0; i < D_CODES; i++) dyn_dtree[i * 2] = 0; for (i = 0; i < BL_CODES; i++) bl_tree[i * 2] = 0; dyn_ltree[END_BLOCK * 2] = 1; that.opt_len = that.static_len = 0; last_lit = matches = 0; } // Initialize the tree data structures for a new zlib stream. function tr_init() { l_desc.dyn_tree = dyn_ltree; l_desc.stat_desc = StaticTree.static_l_desc; d_desc.dyn_tree = dyn_dtree; d_desc.stat_desc = StaticTree.static_d_desc; bl_desc.dyn_tree = bl_tree; bl_desc.stat_desc = StaticTree.static_bl_desc; bi_buf = 0; bi_valid = 0; last_eob_len = 8; // enough lookahead for inflate // Initialize the first block of the first file: init_block(); } // Restore the heap property by moving down the tree starting at node k, // exchanging a node with the smallest of its two sons if necessary, // stopping // when the heap property is re-established (each father smaller than its // two sons). that.pqdownheap = function(tree, // the tree to restore k // node to move down ) { var heap = that.heap; var v = heap[k]; var j = k << 1; // left son of k while (j <= that.heap_len) { // Set j to the smallest of the two sons: if (j < that.heap_len && smaller(tree, heap[j + 1], heap[j], that.depth)) { j++; } // Exit if v is smaller than both sons if (smaller(tree, v, heap[j], that.depth)) break; // Exchange v with the smallest son heap[k] = heap[j]; k = j; // And continue down the tree, setting j to the left son of k j <<= 1; } heap[k] = v; }; // Scan a literal or distance tree to determine the frequencies of the codes // in the bit length tree. 
function scan_tree(tree,// the tree to be scanned max_code // and its largest code of non zero frequency ) { var n; // iterates over all tree elements var prevlen = -1; // last emitted length var curlen; // length of current code var nextlen = tree[0 * 2 + 1]; // length of next code var count = 0; // repeat count of the current code var max_count = 7; // max repeat count var min_count = 4; // min repeat count if (nextlen === 0) { max_count = 138; min_count = 3; } tree[(max_code + 1) * 2 + 1] = 0xffff; // guard for (n = 0; n <= max_code; n++) { curlen = nextlen; nextlen = tree[(n + 1) * 2 + 1]; if (++count < max_count && curlen == nextlen) { continue; } else if (count < min_count) { bl_tree[curlen * 2] += count; } else if (curlen !== 0) { if (curlen != prevlen) bl_tree[curlen * 2]++; bl_tree[REP_3_6 * 2]++; } else if (count <= 10) { bl_tree[REPZ_3_10 * 2]++; } else { bl_tree[REPZ_11_138 * 2]++; } count = 0; prevlen = curlen; if (nextlen === 0) { max_count = 138; min_count = 3; } else if (curlen == nextlen) { max_count = 6; min_count = 3; } else { max_count = 7; min_count = 4; } } } // Construct the Huffman tree for the bit lengths and return the index in // bl_order of the last bit length code to send. function build_bl_tree() { var max_blindex; // index of last bit length code of non zero freq // Determine the bit length frequencies for literal and distance trees scan_tree(dyn_ltree, l_desc.max_code); scan_tree(dyn_dtree, d_desc.max_code); // Build the bit length tree: bl_desc.build_tree(that); // opt_len now includes the length of the tree representations, except // the lengths of the bit lengths codes and the 5+5+4 bits for the // counts. // Determine the number of bit length codes to send. The pkzip format // requires that at least 4 bit length codes be sent. (appnote.txt says // 3 but the actual value used is 4.) 
for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { if (bl_tree[Tree.bl_order[max_blindex] * 2 + 1] !== 0) break; } // Update opt_len to include the bit length tree and counts that.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; return max_blindex; } // Output a byte on the stream. // IN assertion: there is enough room in pending_buf. function put_byte(p) { that.pending_buf[that.pending++] = p; } function put_short(w) { put_byte(w & 0xff); put_byte((w >>> 8) & 0xff); } function putShortMSB(b) { put_byte((b >> 8) & 0xff); put_byte((b & 0xff) & 0xff); } function send_bits(value, length) { var val, len = length; if (bi_valid > Buf_size - len) { val = value; // bi_buf |= (val << bi_valid); bi_buf |= ((val << bi_valid) & 0xffff); put_short(bi_buf); bi_buf = val >>> (Buf_size - bi_valid); bi_valid += len - Buf_size; } else { // bi_buf |= (value) << bi_valid; bi_buf |= (((value) << bi_valid) & 0xffff); bi_valid += len; } } function send_code(c, tree) { var c2 = c * 2; send_bits(tree[c2] & 0xffff, tree[c2 + 1] & 0xffff); } // Send a literal or distance tree in compressed form, using the codes in // bl_tree. 
function send_tree(tree,// the tree to be sent max_code // and its largest code of non zero frequency ) { var n; // iterates over all tree elements var prevlen = -1; // last emitted length var curlen; // length of current code var nextlen = tree[0 * 2 + 1]; // length of next code var count = 0; // repeat count of the current code var max_count = 7; // max repeat count var min_count = 4; // min repeat count if (nextlen === 0) { max_count = 138; min_count = 3; } for (n = 0; n <= max_code; n++) { curlen = nextlen; nextlen = tree[(n + 1) * 2 + 1]; if (++count < max_count && curlen == nextlen) { continue; } else if (count < min_count) { do { send_code(curlen, bl_tree); } while (--count !== 0); } else if (curlen !== 0) { if (curlen != prevlen) { send_code(curlen, bl_tree); count--; } send_code(REP_3_6, bl_tree); send_bits(count - 3, 2); } else if (count <= 10) { send_code(REPZ_3_10, bl_tree); send_bits(count - 3, 3); } else { send_code(REPZ_11_138, bl_tree); send_bits(count - 11, 7); } count = 0; prevlen = curlen; if (nextlen === 0) { max_count = 138; min_count = 3; } else if (curlen == nextlen) { max_count = 6; min_count = 3; } else { max_count = 7; min_count = 4; } } } // Send the header for a block using dynamic Huffman trees: the counts, the // lengths of the bit length codes, the literal tree and the distance tree. // IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. function send_all_trees(lcodes, dcodes, blcodes) { var rank; // index in bl_order send_bits(lcodes - 257, 5); // not +255 as stated in appnote.txt send_bits(dcodes - 1, 5); send_bits(blcodes - 4, 4); // not -3 as stated in appnote.txt for (rank = 0; rank < blcodes; rank++) { send_bits(bl_tree[Tree.bl_order[rank] * 2 + 1], 3); } send_tree(dyn_ltree, lcodes - 1); // literal tree send_tree(dyn_dtree, dcodes - 1); // distance tree } // Flush the bit buffer, keeping at most 7 bits in it. 
// Flush the bit buffer, keeping at most 7 bits in it.
function bi_flush() {
  if (bi_valid == 16) {
    put_short(bi_buf);
    bi_buf = 0;
    bi_valid = 0;
  } else if (bi_valid >= 8) {
    put_byte(bi_buf & 0xff);
    bi_buf >>>= 8;
    bi_valid -= 8;
  }
}

// Emit one empty static block to give inflate enough lookahead.
// The empty block costs 10 bits, of which up to 7 may stay buffered;
// inflate needs 9 bits of lookahead, so if the previous block's last
// real code plus EOB could have left too little, a second empty static
// block is emitted (worst case assumed: last real code on one bit).
function _tr_align() {
  send_bits(STATIC_TREES << 1, 3);
  send_code(END_BLOCK, StaticTree.static_ltree);
  bi_flush();
  // Of the empty block's 10 bits, (10 - bi_valid) are already out; the
  // decoder's lookahead is at least 1 + last_eob_len + that amount.
  if (1 + last_eob_len + 10 - bi_valid < 9) {
    send_bits(STATIC_TREES << 1, 3);
    send_code(END_BLOCK, StaticTree.static_ltree);
    bi_flush();
  }
  last_eob_len = 7;
}
// Record one (distance, length/literal) pair in the pending match
// buffers and bump the frequency counters. Returns true when the
// current block should be flushed.
function _tr_tally(dist, // distance of matched string
    lc                   // match length - MIN_MATCH, or literal (dist == 0)
) {
  var out_length, in_length, dcode;
  that.pending_buf[d_buf + last_lit * 2] = (dist >>> 8) & 0xff;
  that.pending_buf[d_buf + last_lit * 2 + 1] = dist & 0xff;
  that.pending_buf[l_buf + last_lit] = lc & 0xff;
  last_lit++;
  if (dist === 0) {
    // lc is an unmatched literal byte.
    dyn_ltree[lc * 2]++;
  } else {
    matches++;
    // lc is the match length - MIN_MATCH; make dist the distance - 1.
    dist--;
    dyn_ltree[(Tree._length_code[lc] + LITERALS + 1) * 2]++;
    dyn_dtree[Tree.d_code(dist) * 2]++;
  }
  if ((last_lit & 0x1fff) === 0 && level > 2) {
    // Every 8K symbols, estimate the compressed size and flush early
    // if the data looks incompressible.
    out_length = last_lit * 8;
    in_length = strstart - block_start;
    for (dcode = 0; dcode < D_CODES; dcode++) {
      out_length += dyn_dtree[dcode * 2] * (5 + Tree.extra_dbits[dcode]);
    }
    out_length >>>= 3;
    if ((matches < Math.floor(last_lit / 2)) && out_length < Math.floor(in_length / 2))
      return true;
  }
  // Stop one short of lit_bufsize: avoids 64K wraparound on 16-bit
  // machines and respects the 64K-1 stored-block limit.
  return (last_lit == lit_bufsize - 1);
}

// Emit the buffered literal/match pairs compressed with the given
// Huffman trees, followed by the end-of-block code.
function compress_block(ltree, dtree) {
  var dist;   // match distance (0 means literal)
  var lc;     // match length or literal byte
  var lx = 0; // read index into the pending match buffers
  var code;   // the code to send
  var extra;  // number of extra bits for that code
  if (last_lit !== 0) {
    do {
      dist = ((that.pending_buf[d_buf + lx * 2] << 8) & 0xff00) | (that.pending_buf[d_buf + lx * 2 + 1] & 0xff);
      lc = (that.pending_buf[l_buf + lx]) & 0xff;
      lx++;
      if (dist === 0) {
        // Plain literal byte.
        send_code(lc, ltree);
      } else {
        // Length code plus its extra length bits…
        code = Tree._length_code[lc];
        send_code(code + LITERALS + 1, ltree);
        extra = Tree.extra_lbits[code];
        if (extra !== 0) {
          lc -= Tree.base_length[code];
          send_bits(lc, extra);
        }
        // …then the distance code plus its extra distance bits
        // (dist becomes the match distance - 1).
        dist--;
        code = Tree.d_code(dist);
        send_code(code, dtree);
        extra = Tree.extra_dbits[code];
        if (extra !== 0) {
          dist -= Tree.base_dist[code];
          send_bits(dist, extra);
        }
      }
    } while (lx < last_lit);
  }
  send_code(END_BLOCK, ltree);
  last_eob_len = ltree[END_BLOCK * 2 + 1];
}

// Flush the bit buffer and align the output on a byte boundary.
function bi_windup() {
  if (bi_valid > 8) {
    put_short(bi_buf);
  } else if (bi_valid > 0) {
    put_byte(bi_buf & 0xff);
  }
  bi_buf = 0;
  bi_valid = 0;
}
function copy_block(buf, // the input data len, // its length header // true if block header must be written ) { bi_windup(); // align on byte boundary last_eob_len = 8; // enough lookahead for inflate if (header) { put_short(len); put_short(~len); } that.pending_buf.set(window.subarray(buf, buf + len), that.pending); that.pending += len; } // Send a stored block function _tr_stored_block(buf, // input block stored_len, // length of input block eof // true if this is the last block for a file ) { send_bits((STORED_BLOCK << 1) + (eof ? 1 : 0), 3); // send block type copy_block(buf, stored_len, true); // with header } // Determine the best encoding for the current block: dynamic trees, static // trees or store, and output the encoded block to the zip file. function _tr_flush_block(buf, // input block, or NULL if too old stored_len, // length of input block eof // true if this is the last block for a file ) { var opt_lenb, static_lenb;// opt_len and static_len in bytes var max_blindex = 0; // index of last bit length code of non zero freq // Build the Huffman trees unless a stored block is forced if (level > 0) { // Construct the literal and distance trees l_desc.build_tree(that); d_desc.build_tree(that); // At this point, opt_len and static_len are the total bit lengths // of // the compressed block data, excluding the tree representations. // Build the bit length tree for the above two trees, and get the // index // in bl_order of the last bit length code to send. max_blindex = build_bl_tree(); // Determine the best encoding. Compute first the block length in // bytes opt_lenb = (that.opt_len + 3 + 7) >>> 3; static_lenb = (that.static_len + 3 + 7) >>> 3; if (static_lenb <= opt_lenb) opt_lenb = static_lenb; } else { opt_lenb = static_lenb = stored_len + 5; // force a stored block } if ((stored_len + 4 <= opt_lenb) && buf != -1) { // 4: two words for the lengths // The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. 
// Otherwise we can't have processed more than WSIZE input bytes // since // the last block flush, because compression would have been // successful. If LIT_BUFSIZE <= WSIZE, it is never too late to // transform a block into a stored block. _tr_stored_block(buf, stored_len, eof); } else if (static_lenb == opt_lenb) { send_bits((STATIC_TREES << 1) + (eof ? 1 : 0), 3); compress_block(StaticTree.static_ltree, StaticTree.static_dtree); } else { send_bits((DYN_TREES << 1) + (eof ? 1 : 0), 3); send_all_trees(l_desc.max_code + 1, d_desc.max_code + 1, max_blindex + 1); compress_block(dyn_ltree, dyn_dtree); } // The above check is made mod 2^32, for files larger than 512 MB // and uLong implemented on 32 bits. init_block(); if (eof) { bi_windup(); } } function flush_block_only(eof) { _tr_flush_block(block_start >= 0 ? block_start : -1, strstart - block_start, eof); block_start = strstart; strm.flush_pending(); } // Fill the window when the lookahead becomes insufficient. // Updates strstart and lookahead. // // IN assertion: lookahead < MIN_LOOKAHEAD // OUT assertions: strstart <= window_size-MIN_LOOKAHEAD // At least one byte has been read, or avail_in === 0; reads are // performed for at least two bytes (required for the zip translate_eol // option -- not supported here). function fill_window() { var n, m; var p; var more; // Amount of free space at the end of the window. do { more = (window_size - lookahead - strstart); // Deal with !@#$% 64K limit: if (more === 0 && strstart === 0 && lookahead === 0) { more = w_size; } else if (more == -1) { // Very unlikely, but possible on 16 bit machine if strstart == // 0 // and lookahead == 1 (input done one byte at time) more--; // If the window is almost full and there is insufficient // lookahead, // move the upper half to the lower one to make room in the // upper half. 
} else if (strstart >= w_size + w_size - MIN_LOOKAHEAD) { window.set(window.subarray(w_size, w_size + w_size), 0); match_start -= w_size; strstart -= w_size; // we now have strstart >= MAX_DIST block_start -= w_size; // Slide the hash table (could be avoided with 32 bit values // at the expense of memory usage). We slide even when level == // 0 // to keep the hash table consistent if we switch back to level // > 0 // later. (Using level 0 permanently is not an optimal usage of // zlib, so we don't care about this pathological case.) n = hash_size; p = n; do { m = (head[--p] & 0xffff); head[p] = (m >= w_size ? m - w_size : 0); } while (--n !== 0); n = w_size; p = n; do { m = (prev[--p] & 0xffff); prev[p] = (m >= w_size ? m - w_size : 0); // If n is not on any hash chain, prev[n] is garbage but // its value will never be used. } while (--n !== 0); more += w_size; } if (strm.avail_in === 0) return; // If there was no sliding: // strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && // more == window_size - lookahead - strstart // => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) // => more >= window_size - 2*WSIZE + 2 // In the BIG_MEM or MMAP case (not yet supported), // window_size == input_size + MIN_LOOKAHEAD && // strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. // Otherwise, window_size == 2*WSIZE so more >= 2. // If there was sliding, more >= WSIZE. So in all cases, more >= 2. n = strm.read_buf(window, strstart + lookahead, more); lookahead += n; // Initialize the hash value now that we have some input: if (lookahead >= MIN_MATCH) { ins_h = window[strstart] & 0xff; ins_h = (((ins_h) << hash_shift) ^ (window[strstart + 1] & 0xff)) & hash_mask; } // If the whole input has less than MIN_MATCH bytes, ins_h is // garbage, // but this is not important since only literal bytes will be // emitted. 
} while (lookahead < MIN_LOOKAHEAD && strm.avail_in !== 0); } // Copy without compression as much as possible from the input stream, // return // the current block state. // This function does not insert new strings in the dictionary since // uncompressible data is probably not useful. This function is used // only for the level=0 compression option. // NOTE: this function should be optimized to avoid extra copying from // window to pending_buf. function deflate_stored(flush) { // Stored blocks are limited to 0xffff bytes, pending_buf is limited // to pending_buf_size, and each stored block has a 5 byte header: var max_block_size = 0xffff; var max_start; if (max_block_size > pending_buf_size - 5) { max_block_size = pending_buf_size - 5; } // Copy as much as possible from input to output: while (true) { // Fill the window as much as possible: if (lookahead <= 1) { fill_window(); if (lookahead === 0 && flush == Z_NO_FLUSH) return NeedMore; if (lookahead === 0) break; // flush the current block } strstart += lookahead; lookahead = 0; // Emit a stored block if pending_buf will be full: max_start = block_start + max_block_size; if (strstart === 0 || strstart >= max_start) { // strstart === 0 is possible when wraparound on 16-bit machine lookahead = (strstart - max_start); strstart = max_start; flush_block_only(false); if (strm.avail_out === 0) return NeedMore; } // Flush if we may have to slide, otherwise block_start may become // negative and the data will be gone: if (strstart - block_start >= w_size - MIN_LOOKAHEAD) { flush_block_only(false); if (strm.avail_out === 0) return NeedMore; } } flush_block_only(flush == Z_FINISH); if (strm.avail_out === 0) return (flush == Z_FINISH) ? FinishStarted : NeedMore; return flush == Z_FINISH ? 
FinishDone : BlockDone; } function longest_match(cur_match) { var chain_length = max_chain_length; // max hash chain length var scan = strstart; // current string var match; // matched string var len; // length of current match var best_len = prev_length; // best match length so far var limit = strstart > (w_size - MIN_LOOKAHEAD) ? strstart - (w_size - MIN_LOOKAHEAD) : 0; var _nice_match = nice_match; // Stop when cur_match becomes <= limit. To simplify the code, // we prevent matches with the string of window index 0. var wmask = w_mask; var strend = strstart + MAX_MATCH; var scan_end1 = window[scan + best_len - 1]; var scan_end = window[scan + best_len]; // The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of // 16. // It is easy to get rid of this optimization if necessary. // Do not waste too much time if we already have a good match: if (prev_length >= good_match) { chain_length >>= 2; } // Do not look for matches beyond the end of the input. This is // necessary // to make deflate deterministic. if (_nice_match > lookahead) _nice_match = lookahead; do { match = cur_match; // Skip to next match if the match length cannot increase // or if the match length is less than 2: if (window[match + best_len] != scan_end || window[match + best_len - 1] != scan_end1 || window[match] != window[scan] || window[++match] != window[scan + 1]) continue; // The check at best_len-1 can be removed because it will be made // again later. (This heuristic is not always a win.) // It is not necessary to compare scan[2] and match[2] since they // are always equal when the other bytes match, given that // the hash keys are equal and that HASH_BITS >= 8. scan += 2; match++; // We check for insufficient lookahead only every 8th comparison; // the 256th check will be made at strstart+258. 
do { } while (window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && window[++scan] == window[++match] && scan < strend); len = MAX_MATCH - (strend - scan); scan = strend - MAX_MATCH; if (len > best_len) { match_start = cur_match; best_len = len; if (len >= _nice_match) break; scan_end1 = window[scan + best_len - 1]; scan_end = window[scan + best_len]; } } while ((cur_match = (prev[cur_match & wmask] & 0xffff)) > limit && --chain_length !== 0); if (best_len <= lookahead) return best_len; return lookahead; } // Compress as much as possible from the input stream, return the current // block state. // This function does not perform lazy evaluation of matches and inserts // new strings in the dictionary only for unmatched strings or for short // matches. It is used only for the fast compression options. function deflate_fast(flush) { // short hash_head = 0; // head of the hash chain var hash_head = 0; // head of the hash chain var bflush; // set if current block must be flushed while (true) { // Make sure that we always have enough lookahead, except // at the end of the input file. We need MAX_MATCH bytes // for the next match, plus MIN_MATCH bytes to insert the // string following the next match. if (lookahead < MIN_LOOKAHEAD) { fill_window(); if (lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) { return NeedMore; } if (lookahead === 0) break; // flush the current block } // Insert the string window[strstart .. 
strstart+2] in the // dictionary, and set hash_head to the head of the hash chain: if (lookahead >= MIN_MATCH) { ins_h = (((ins_h) << hash_shift) ^ (window[(strstart) + (MIN_MATCH - 1)] & 0xff)) & hash_mask; // prev[strstart&w_mask]=hash_head=head[ins_h]; hash_head = (head[ins_h] & 0xffff); prev[strstart & w_mask] = head[ins_h]; head[ins_h] = strstart; } // Find the longest match, discarding those <= prev_length. // At this point we have always match_length < MIN_MATCH if (hash_head !== 0 && ((strstart - hash_head) & 0xffff) <= w_size - MIN_LOOKAHEAD) { // To simplify the code, we prevent matches with the string // of window index 0 (in particular we have to avoid a match // of the string with itself at the start of the input file). if (strategy != Z_HUFFMAN_ONLY) { match_length = longest_match(hash_head); } // longest_match() sets match_start } if (match_length >= MIN_MATCH) { // check_match(strstart, match_start, match_length); bflush = _tr_tally(strstart - match_start, match_length - MIN_MATCH); lookahead -= match_length; // Insert new strings in the hash table only if the match length // is not too large. This saves time but degrades compression. if (match_length <= max_lazy_match && lookahead >= MIN_MATCH) { match_length--; // string at strstart already in hash table do { strstart++; ins_h = ((ins_h << hash_shift) ^ (window[(strstart) + (MIN_MATCH - 1)] & 0xff)) & hash_mask; // prev[strstart&w_mask]=hash_head=head[ins_h]; hash_head = (head[ins_h] & 0xffff); prev[strstart & w_mask] = head[ins_h]; head[ins_h] = strstart; // strstart never exceeds WSIZE-MAX_MATCH, so there are // always MIN_MATCH bytes ahead. } while (--match_length !== 0); strstart++; } else { strstart += match_length; match_length = 0; ins_h = window[strstart] & 0xff; ins_h = (((ins_h) << hash_shift) ^ (window[strstart + 1] & 0xff)) & hash_mask; // If lookahead < MIN_MATCH, ins_h is garbage, but it does // not // matter since it will be recomputed at next deflate call. 
} } else { // No match, output a literal byte bflush = _tr_tally(0, window[strstart] & 0xff); lookahead--; strstart++; } if (bflush) { flush_block_only(false); if (strm.avail_out === 0) return NeedMore; } } flush_block_only(flush == Z_FINISH); if (strm.avail_out === 0) { if (flush == Z_FINISH) return FinishStarted; else return NeedMore; } return flush == Z_FINISH ? FinishDone : BlockDone; } // Same as above, but achieves better compression. We use a lazy // evaluation for matches: a match is finally adopted only if there is // no better match at the next window position. function deflate_slow(flush) { // short hash_head = 0; // head of hash chain var hash_head = 0; // head of hash chain var bflush; // set if current block must be flushed var max_insert; // Process the input block. while (true) { // Make sure that we always have enough lookahead, except // at the end of the input file. We need MAX_MATCH bytes // for the next match, plus MIN_MATCH bytes to insert the // string following the next match. if (lookahead < MIN_LOOKAHEAD) { fill_window(); if (lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) { return NeedMore; } if (lookahead === 0) break; // flush the current block } // Insert the string window[strstart .. strstart+2] in the // dictionary, and set hash_head to the head of the hash chain: if (lookahead >= MIN_MATCH) { ins_h = (((ins_h) << hash_shift) ^ (window[(strstart) + (MIN_MATCH - 1)] & 0xff)) & hash_mask; // prev[strstart&w_mask]=hash_head=head[ins_h]; hash_head = (head[ins_h] & 0xffff); prev[strstart & w_mask] = head[ins_h]; head[ins_h] = strstart; } // Find the longest match, discarding those <= prev_length. 
prev_length = match_length; prev_match = match_start; match_length = MIN_MATCH - 1; if (hash_head !== 0 && prev_length < max_lazy_match && ((strstart - hash_head) & 0xffff) <= w_size - MIN_LOOKAHEAD) { // To simplify the code, we prevent matches with the string // of window index 0 (in particular we have to avoid a match // of the string with itself at the start of the input file). if (strategy != Z_HUFFMAN_ONLY) { match_length = longest_match(hash_head); } // longest_match() sets match_start if (match_length <= 5 && (strategy == Z_FILTERED || (match_length == MIN_MATCH && strstart - match_start > 4096))) { // If prev_match is also MIN_MATCH, match_start is garbage // but we will ignore the current match anyway. match_length = MIN_MATCH - 1; } } // If there was a match at the previous step and the current // match is not better, output the previous match: if (prev_length >= MIN_MATCH && match_length <= prev_length) { max_insert = strstart + lookahead - MIN_MATCH; // Do not insert strings in hash table beyond this. // check_match(strstart-1, prev_match, prev_length); bflush = _tr_tally(strstart - 1 - prev_match, prev_length - MIN_MATCH); // Insert in hash table all strings up to the end of the match. // strstart-1 and strstart are already inserted. If there is not // enough lookahead, the last two strings are not inserted in // the hash table. lookahead -= prev_length - 1; prev_length -= 2; do { if (++strstart <= max_insert) { ins_h = (((ins_h) << hash_shift) ^ (window[(strstart) + (MIN_MATCH - 1)] & 0xff)) & hash_mask; // prev[strstart&w_mask]=hash_head=head[ins_h]; hash_head = (head[ins_h] & 0xffff); prev[strstart & w_mask] = head[ins_h]; head[ins_h] = strstart; } } while (--prev_length !== 0); match_available = 0; match_length = MIN_MATCH - 1; strstart++; if (bflush) { flush_block_only(false); if (strm.avail_out === 0) return NeedMore; } } else if (match_available !== 0) { // If there was no match at the previous position, output a // single literal. 
If there was a match but the current match // is longer, truncate the previous match to a single literal. bflush = _tr_tally(0, window[strstart - 1] & 0xff); if (bflush) { flush_block_only(false); } strstart++; lookahead--; if (strm.avail_out === 0) return NeedMore; } else { // There is no previous match to compare with, wait for // the next step to decide. match_available = 1; strstart++; lookahead--; } } if (match_available !== 0) { bflush = _tr_tally(0, window[strstart - 1] & 0xff); match_available = 0; } flush_block_only(flush == Z_FINISH); if (strm.avail_out === 0) { if (flush == Z_FINISH) return FinishStarted; else return NeedMore; } return flush == Z_FINISH ? FinishDone : BlockDone; } function deflateReset(strm) { strm.total_in = strm.total_out = 0; strm.msg = null; // that.pending = 0; that.pending_out = 0; status = BUSY_STATE; last_flush = Z_NO_FLUSH; tr_init(); lm_init(); return Z_OK; } that.deflateInit = function(strm, _level, bits, _method, memLevel, _strategy) { if (!_method) _method = Z_DEFLATED; if (!memLevel) memLevel = DEF_MEM_LEVEL; if (!_strategy) _strategy = Z_DEFAULT_STRATEGY; // byte[] my_version=ZLIB_VERSION; // // if (!version || version[0] != my_version[0] // || stream_size != sizeof(z_stream)) { // return Z_VERSION_ERROR; // } strm.msg = null; if (_level == Z_DEFAULT_COMPRESSION) _level = 6; if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || _method != Z_DEFLATED || bits < 9 || bits > 15 || _level < 0 || _level > 9 || _strategy < 0 || _strategy > Z_HUFFMAN_ONLY) { return Z_STREAM_ERROR; } strm.dstate = that; w_bits = bits; w_size = 1 << w_bits; w_mask = w_size - 1; hash_bits = memLevel + 7; hash_size = 1 << hash_bits; hash_mask = hash_size - 1; hash_shift = Math.floor((hash_bits + MIN_MATCH - 1) / MIN_MATCH); window = new Uint8Array(w_size * 2); prev = []; head = []; lit_bufsize = 1 << (memLevel + 6); // 16K elements by default // We overlay pending_buf and d_buf+l_buf. 
This works since the average // output size for (length,distance) codes is <= 24 bits. that.pending_buf = new Uint8Array(lit_bufsize * 4); pending_buf_size = lit_bufsize * 4; d_buf = Math.floor(lit_bufsize / 2); l_buf = (1 + 2) * lit_bufsize; level = _level; strategy = _strategy; method = _method & 0xff; return deflateReset(strm); }; that.deflateEnd = function() { if (status != INIT_STATE && status != BUSY_STATE && status != FINISH_STATE) { return Z_STREAM_ERROR; } // Deallocate in reverse order of allocations: that.pending_buf = null; head = null; prev = null; window = null; // free that.dstate = null; return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK; }; that.deflateParams = function(strm, _level, _strategy) { var err = Z_OK; if (_level == Z_DEFAULT_COMPRESSION) { _level = 6; } if (_level < 0 || _level > 9 || _strategy < 0 || _strategy > Z_HUFFMAN_ONLY) { return Z_STREAM_ERROR; } if (config_table[level].func != config_table[_level].func && strm.total_in !== 0) { // Flush the last buffer: err = strm.deflate(Z_PARTIAL_FLUSH); } if (level != _level) { level = _level; max_lazy_match = config_table[level].max_lazy; good_match = config_table[level].good_length; nice_match = config_table[level].nice_length; max_chain_length = config_table[level].max_chain; } strategy = _strategy; return err; }; that.deflateSetDictionary = function(strm, dictionary, dictLength) { var length = dictLength; var n, index = 0; if (!dictionary || status != INIT_STATE) return Z_STREAM_ERROR; if (length < MIN_MATCH) return Z_OK; if (length > w_size - MIN_LOOKAHEAD) { length = w_size - MIN_LOOKAHEAD; index = dictLength - length; // use the tail of the dictionary } window.set(dictionary.subarray(index, index + length), 0); strstart = length; block_start = length; // Insert all strings in the hash table (except for the last two bytes). // s->lookahead stays null, so s->ins_h will be recomputed at the next // call of fill_window. 
ins_h = window[0] & 0xff; ins_h = (((ins_h) << hash_shift) ^ (window[1] & 0xff)) & hash_mask; for (n = 0; n <= length - MIN_MATCH; n++) { ins_h = (((ins_h) << hash_shift) ^ (window[(n) + (MIN_MATCH - 1)] & 0xff)) & hash_mask; prev[n & w_mask] = head[ins_h]; head[ins_h] = n; } return Z_OK; }; that.deflate = function(_strm, flush) { var i, header, level_flags, old_flush, bstate; if (flush > Z_FINISH || flush < 0) { return Z_STREAM_ERROR; } if (!_strm.next_out || (!_strm.next_in && _strm.avail_in !== 0) || (status == FINISH_STATE && flush != Z_FINISH)) { _strm.msg = z_errmsg[Z_NEED_DICT - (Z_STREAM_ERROR)]; return Z_STREAM_ERROR; } if (_strm.avail_out === 0) { _strm.msg = z_errmsg[Z_NEED_DICT - (Z_BUF_ERROR)]; return Z_BUF_ERROR; } strm = _strm; // just in case old_flush = last_flush; last_flush = flush; // Write the zlib header if (status == INIT_STATE) { header = (Z_DEFLATED + ((w_bits - 8) << 4)) << 8; level_flags = ((level - 1) & 0xff) >> 1; if (level_flags > 3) level_flags = 3; header |= (level_flags << 6); if (strstart !== 0) header |= PRESET_DICT; header += 31 - (header % 31); status = BUSY_STATE; putShortMSB(header); } // Flush as much pending output as possible if (that.pending !== 0) { strm.flush_pending(); if (strm.avail_out === 0) { // console.log(" avail_out==0"); // Since avail_out is 0, deflate will be called again with // more output space, but possibly with both pending and // avail_in equal to zero. There won't be anything to do, // but this is not an error situation so make sure we // return OK instead of BUF_ERROR at next call of deflate: last_flush = -1; return Z_OK; } // Make sure there is something to do and avoid duplicate // consecutive // flushes. For repeated and useless calls with Z_FINISH, we keep // returning Z_STREAM_END instead of Z_BUFF_ERROR. 
} else if (strm.avail_in === 0 && flush <= old_flush && flush != Z_FINISH) { strm.msg = z_errmsg[Z_NEED_DICT - (Z_BUF_ERROR)]; return Z_BUF_ERROR; } // User must not provide more input after the first FINISH: if (status == FINISH_STATE && strm.avail_in !== 0) { _strm.msg = z_errmsg[Z_NEED_DICT - (Z_BUF_ERROR)]; return Z_BUF_ERROR; } // Start a new block or continue the current one. if (strm.avail_in !== 0 || lookahead !== 0 || (flush != Z_NO_FLUSH && status != FINISH_STATE)) { bstate = -1; switch (config_table[level].func) { case STORED: bstate = deflate_stored(flush); break; case FAST: bstate = deflate_fast(flush); break; case SLOW: bstate = deflate_slow(flush); break; default: } if (bstate == FinishStarted || bstate == FinishDone) { status = FINISH_STATE; } if (bstate == NeedMore || bstate == FinishStarted) { if (strm.avail_out === 0) { last_flush = -1; // avoid BUF_ERROR next call, see above } return Z_OK; // If flush != Z_NO_FLUSH && avail_out === 0, the next call // of deflate should use the same flush parameter to make sure // that the flush is complete. So we don't have to output an // empty block here, this will be done at next call. This also // ensures that for a very small output buffer, we emit at most // one empty block. } if (bstate == BlockDone) { if (flush == Z_PARTIAL_FLUSH) { _tr_align(); } else { // FULL_FLUSH or SYNC_FLUSH _tr_stored_block(0, 0, false); // For a full flush, this empty block will be recognized // as a special marker by inflate_sync(). 
if (flush == Z_FULL_FLUSH) { // state.head[s.hash_size-1]=0; for (i = 0; i < hash_size/*-1*/; i++) // forget history head[i] = 0; } } strm.flush_pending(); if (strm.avail_out === 0) { last_flush = -1; // avoid BUF_ERROR at next call, see above return Z_OK; } } } if (flush != Z_FINISH) return Z_OK; return Z_STREAM_END; }; } // ZStream function ZStream() { var that = this; that.next_in_index = 0; that.next_out_index = 0; // that.next_in; // next input byte that.avail_in = 0; // number of bytes available at next_in that.total_in = 0; // total nb of input bytes read so far // that.next_out; // next output byte should be put there that.avail_out = 0; // remaining free space at next_out that.total_out = 0; // total nb of bytes output so far // that.msg; // that.dstate; } ZStream.prototype = { deflateInit : function(level, bits) { var that = this; that.dstate = new Deflate(); if (!bits) bits = MAX_BITS; return that.dstate.deflateInit(that, level, bits); }, deflate : function(flush) { var that = this; if (!that.dstate) { return Z_STREAM_ERROR; } return that.dstate.deflate(that, flush); }, deflateEnd : function() { var that = this; if (!that.dstate) return Z_STREAM_ERROR; var ret = that.dstate.deflateEnd(); that.dstate = null; return ret; }, deflateParams : function(level, strategy) { var that = this; if (!that.dstate) return Z_STREAM_ERROR; return that.dstate.deflateParams(that, level, strategy); }, deflateSetDictionary : function(dictionary, dictLength) { var that = this; if (!that.dstate) return Z_STREAM_ERROR; return that.dstate.deflateSetDictionary(that, dictionary, dictLength); }, // Read a new buffer from the current input stream, update the // total number of bytes read. All deflate() input goes through // this function so some applications may wish to modify it to avoid // allocating a large strm->next_in buffer and copying from it. // (See also flush_pending()). 
read_buf : function(buf, start, size) { var that = this; var len = that.avail_in; if (len > size) len = size; if (len === 0) return 0; that.avail_in -= len; buf.set(that.next_in.subarray(that.next_in_index, that.next_in_index + len), start); that.next_in_index += len; that.total_in += len; return len; }, // Flush as much pending output as possible. All deflate() output goes // through this function so some applications may wish to modify it // to avoid allocating a large strm->next_out buffer and copying into it. // (See also read_buf()). flush_pending : function() { var that = this; var len = that.dstate.pending; if (len > that.avail_out) len = that.avail_out; if (len === 0) return; // if (that.dstate.pending_buf.length <= that.dstate.pending_out || that.next_out.length <= that.next_out_index // || that.dstate.pending_buf.length < (that.dstate.pending_out + len) || that.next_out.length < (that.next_out_index + // len)) { // console.log(that.dstate.pending_buf.length + ", " + that.dstate.pending_out + ", " + that.next_out.length + ", " + // that.next_out_index + ", " + len); // console.log("avail_out=" + that.avail_out); // } that.next_out.set(that.dstate.pending_buf.subarray(that.dstate.pending_out, that.dstate.pending_out + len), that.next_out_index); that.next_out_index += len; that.dstate.pending_out += len; that.total_out += len; that.avail_out -= len; that.dstate.pending -= len; if (that.dstate.pending === 0) { that.dstate.pending_out = 0; } } }; // Deflater function Deflater(options) { var that = this; var z = new ZStream(); var bufsize = 512; var flush = Z_NO_FLUSH; var buf = new Uint8Array(bufsize); var level = options ? 
options.level : Z_DEFAULT_COMPRESSION; if (typeof level == "undefined") level = Z_DEFAULT_COMPRESSION; z.deflateInit(level); z.next_out = buf; that.append = function(data, onprogress) { var err, buffers = [], lastIndex = 0, bufferIndex = 0, bufferSize = 0, array; if (!data.length) return; z.next_in_index = 0; z.next_in = data; z.avail_in = data.length; do { z.next_out_index = 0; z.avail_out = bufsize; err = z.deflate(flush); if (err != Z_OK) throw new Error("deflating: " + z.msg); if (z.next_out_index) if (z.next_out_index == bufsize) buffers.push(new Uint8Array(buf)); else buffers.push(new Uint8Array(buf.subarray(0, z.next_out_index))); bufferSize += z.next_out_index; if (onprogress && z.next_in_index > 0 && z.next_in_index != lastIndex) { onprogress(z.next_in_index); lastIndex = z.next_in_index; } } while (z.avail_in > 0 || z.avail_out === 0); array = new Uint8Array(bufferSize); buffers.forEach(function(chunk) { array.set(chunk, bufferIndex); bufferIndex += chunk.length; }); return array; }; that.flush = function() { var err, buffers = [], bufferIndex = 0, bufferSize = 0, array; do { z.next_out_index = 0; z.avail_out = bufsize; err = z.deflate(Z_FINISH); if (err != Z_STREAM_END && err != Z_OK) throw new Error("deflating: " + z.msg); if (bufsize - z.avail_out > 0) buffers.push(new Uint8Array(buf.subarray(0, z.next_out_index))); bufferSize += z.next_out_index; } while (z.avail_in > 0 || z.avail_out === 0); z.deflateEnd(); array = new Uint8Array(bufferSize); buffers.forEach(function(chunk) { array.set(chunk, bufferIndex); bufferIndex += chunk.length; }); return array; }; } // 'zip' may not be defined in z-worker and some tests var env = global.zip || global; env.Deflater = env._jzlib_Deflater = Deflater; })(this); ================================================ FILE: elements.css ================================================ /* Hide Firefox's indicator when images are loading */ img:-moz-loading { visibility: hidden; } /* ------ UTILITY ---------- */ .hidden { 
display: none; } div.tip { color: #555; font-weight: normal; text-align: left; position: relative; display: inline-block; font-size: 13px; line-height: 1.45; background: #fff; border: 1px solid #a2a2a2; border-radius: 6px; padding: 8px 11px 9px 11px; bottom: 0px; left: -50%; margin-right: 9px; white-space: pre; box-shadow: 0 0px 10px rgba(0, 0, 0, 0.15); } /* tail */ .tip:after, .tip:before { top: 100%; border: solid transparent; content: " "; height: 0; width: 0; position: absolute; pointer-events: none; } /* tail fg */ .tip:after { border-color: rgba(136, 183, 213, 0); border-top-color: #fff; border-width: 8px; left: 50%; margin-left: -8px; } /* tail bg */ .tip:before { border-color: rgba(194, 225, 245, 0); border-top-color: #999; border-width: 9px; left: 50%; margin-left: -9px; } .tip-button { float: right; margin-top: 2px; display:inline-block; position: relative; z-index: 500; cursor: pointer; text-align: center; font-size: 13px; line-height: 1; font-weight: normal; color: #799FCB; } .tip-button .tip-anchor { bottom: 24px; left: 8px; } .tip-anchor { visibility: hidden; position: absolute; bottom: 0; left: 0; } .tip-button:hover { font-weight: bold; color: #033D6D; } .tip-button:hover * { visibility: visible; } .clicktext { display:inline-block; line-height: 1; cursor: pointer; border: none; outline: none; padding: 0; margin: 0; text-decoration:none; text-indent: 0px; } .file-control { position: absolute; top: -1000px; } /* -------- BUTTONS ---------- */ .header-btn { color: #fff; border: none; padding: 3px 7px 4px 7px; border-radius: 3px; margin-top: 5px; } .page-header .header-btn.disabled, .page-header .header-btn.disabled:hover { background-color: transparent; } .page-header .header-btn.active { background-color: black; } .btn.active { cursor: pointer; } .btn.selected, .btn.selected:hover { color: #aaa; background: none; } .btn.inline-btn:hover:not(.selected) { background-color: #FFFCDC; } .btn { text-align: center; padding: 4px 7px 5px 7px; border-radius: 
4px; line-height: 1; display: inline-block; cursor: pointer; } .btn.disabled { cursor: default; } .dialog-btn { display: inline-block; margin-bottom: 4px; margin-top: 1px; font-size: 13px; color: white; min-width: 28px; } .inline-btn { margin: 0; border: 1px solid #999; padding: 1px 4px 3px 3px; } .text-btn { cursor: pointer; } .text-btn.disabled { cursor: auto; color: #999; } ================================================ FILE: encode.js ================================================ /** * https://github.com/giscafer/mapshaper-plus * 对坐标数据进行加密 * @author giscafer * @version 1.0 * @date 2016-06-04T01:48:33+0800 * 参考:https://github.com/ecomfe/echarts/blob/8eeb7e5abe207d0536c62ce1f4ddecc6adfdf85e/src/util/mapData/rawData/encode.js */ !(function (name, definition) { var hasDefine = typeof define === 'funciton', hasExports = typeof module !== 'undefined' && module.exports; if (hasDefine) { //AMD/CMD define(difinition); } else if (hasExports) { //Node.js module.exports = definition(); } else { this[name] = definition(); } })('Encoder', function () { function Encoder() { } Encoder.prototype.convert2Echarts = function (rawStr, fileName, type) { var results = ""; var json = JSON.parse(rawStr); // Meta tag json.UTF8Encoding = true; var features = json.features; // console.log(json); if (features) { features.forEach(function (feature) { var encodeOffsets = feature.geometry.encodeOffsets = []; var coordinates = feature.geometry.coordinates; if (feature.geometry.type === 'Polygon') { coordinates.forEach(function (coordinate, idx) { coordinates[idx] = encodePolygon( coordinate, encodeOffsets[idx] = [] ); }); } else if (feature.geometry.type === 'MultiPolygon') { coordinates.forEach(function (polygon, idx1) { encodeOffsets[idx1] = []; polygon.forEach(function (coordinate, idx2) { coordinates[idx1][idx2] = encodePolygon( coordinate, encodeOffsets[idx1][idx2] = [] ); }); }); } }); } else { var geometries = json.geometries; geometries.forEach(function (geometry) { var 
encodeOffsets = geometry.encodeOffsets = []; var coordinates = geometry.coordinates; if (geometry.type === 'Polygon') { coordinates.forEach(function (coordinate, idx) { coordinates[idx] = encodePolygon( coordinate, encodeOffsets[idx] = [] ); }); } else if (geometry.type === 'MultiPolygon') { coordinates.forEach(function (polygon, idx1) { encodeOffsets[idx1] = []; polygon.forEach(function (coordinate, idx2) { coordinates[idx1][idx2] = encodePolygon( coordinate, encodeOffsets[idx1][idx2] = [] ); }); }); } }); } if (type === 'json') { results = JSON.stringify(json); } else { results = addEchartsJsWrapper(JSON.stringify(json), fileName); } return results; }; function encodePolygon(coordinate, encodeOffsets) { var result = ''; var prevX = quantize(coordinate[0][0]); var prevY = quantize(coordinate[0][1]); // Store the origin offset encodeOffsets[0] = prevX; encodeOffsets[1] = prevY; for (var i = 0; i < coordinate.length; i++) { var point = coordinate[i]; result += encode(point[0], prevX); result += encode(point[1], prevY); prevX = quantize(point[0]); prevY = quantize(point[1]); } return result; } function addAMDWrapper(jsonStr) { return ['define(function() {', ' return ' + jsonStr + ';', '});'].join('\n'); } function addEchartsJsWrapper(jsonStr, fileName) { return ['(function (root, factory) {', " if (typeof define === 'function' && define.amd) {", " define(['exports', 'echarts'], factory);", " } else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') {", " factory(exports, require('echarts'));", " } else {", " factory({}, root.echarts);", " }", " }(this, function (exports, echarts) {", " var log = function (msg) {", " if (typeof console !== 'undefined') {", " console && console.error && console.error(msg);", " }", " }", " if (!echarts) {", " log('ECharts is not Loaded');", " return;", " }", " if (!echarts.registerMap) {", " log('ECharts Map is not loaded')", " return;", " }", " echarts.registerMap('" + fileName + "'," + jsonStr, ' 
)}));'].join('\n'); } function encode(val, prev) { // Quantization val = quantize(val); // var tmp = val; // Delta val = val - prev; if (((val << 1) ^ (val >> 15)) + 64 === 8232) { //WTF, 8232 will get syntax error in js code val--; } // ZigZag val = (val << 1) ^ (val >> 15); // add offset and get unicode return String.fromCharCode(val + 64); // var tmp = {'tmp' : str}; // try{ // eval("(" + JSON.stringify(tmp) + ")"); // }catch(e) { // console.log(val + 64); // } } function quantize(val) { return Math.ceil(val * 1024); } return new Encoder(); }); ================================================ FILE: index.html ================================================ mapshaper plus

Unfortunately, mapshaper can't run in this web browser

For best results, try Google Chrome or Mozilla Firefox.
0 line intersections
Repair
================================================ FILE: manifest.js ================================================ /* replaced by a file manifest by mapshaper-gui server */ ================================================ FILE: mapshaper-gui.js ================================================ (function(){ var api = mapshaper; // assuming mapshaper is in global scope var utils = api.utils; var gui = api.gui = {}; var cli = api.cli; var geom = api.geom; var MapShaper = api.internal; var Bounds = api.internal.Bounds; var APIError = api.internal.APIError; var message = api.internal.message; // Replace error function in mapshaper lib var error = MapShaper.error = function() { stop.apply(null, utils.toArray(arguments)); }; // replace stop function var stop = MapShaper.stop = function() { // Show a popup error message, then throw an error var msg = gui.formatMessageArgs(arguments); gui.alert(msg); throw new Error(msg); }; function Handler(type, target, callback, listener, priority) { this.type = type; this.callback = callback; this.listener = listener || null; this.priority = priority || 0; this.target = target; } Handler.prototype.trigger = function(evt) { if (!evt) { evt = new EventData(this.type); evt.target = this.target; } else if (evt.target != this.target || evt.type != this.type) { error("[Handler] event target/type have changed."); } this.callback.call(this.listener, evt); } function EventData(type, target, data) { this.type = type; this.target = target; if (data) { utils.defaults(this, data); this.data = data; } } EventData.prototype.stopPropagation = function() { this.__stop__ = true; }; // Base class for objects that dispatch events function EventDispatcher() {} // @obj (optional) data object, gets mixed into event // @listener (optional) dispatch event only to this object EventDispatcher.prototype.dispatchEvent = function(type, obj, listener) { var evt; // TODO: check for bugs if handlers are removed elsewhere while firing var handlers = this._handlers; if 
(handlers) { for (var i = 0, len = handlers.length; i < len; i++) { var handler = handlers[i]; if (handler.type == type && (!listener || listener == handler.listener)) { if (!evt) { evt = new EventData(type, this, obj); } else if (evt.__stop__) { break; } handler.trigger(evt); } } } }; EventDispatcher.prototype.addEventListener = EventDispatcher.prototype.on = function(type, callback, context, priority) { context = context || this; priority = priority || 0; var handler = new Handler(type, this, callback, context, priority); // Insert the new event in the array of handlers according to its priority. var handlers = this._handlers || (this._handlers = []); var i = handlers.length; while (--i >= 0 && handlers[i].priority < handler.priority) {} handlers.splice(i+1, 0, handler); return this; }; // Remove an event handler. // @param {string} type Event type to match. // @param {function(BoundEvent)} callback Event handler function to match. // @param {*=} context Execution context of the event handler to match. // @return {number} Returns number of handlers removed (expect 0 or 1). EventDispatcher.prototype.removeEventListener = function(type, callback, context) { context = context || this; var count = this.removeEventListeners(type, callback, context); return count; }; // Remove event handlers; passing arguments can limit which listeners to remove // Returns nmber of handlers removed. 
EventDispatcher.prototype.removeEventListeners = function(type, callback, context) { var handlers = this._handlers; var newArr = []; var count = 0; for (var i = 0; handlers && i < handlers.length; i++) { var evt = handlers[i]; if ((!type || type == evt.type) && (!callback || callback == evt.callback) && (!context || context == evt.listener)) { count += 1; } else { newArr.push(evt); } } this._handlers = newArr; return count; }; EventDispatcher.prototype.countEventListeners = function(type) { var handlers = this._handlers, len = handlers && handlers.length || 0, count = 0; if (!type) return len; for (var i = 0; i < len; i++) { if (handlers[i].type === type) count++; } return count; }; var Env = (function() { var inNode = typeof module !== 'undefined' && !!module.exports; var inBrowser = typeof window !== 'undefined' && !inNode; var inPhantom = inBrowser && !!(window.phantom && window.phantom.exit); var ieVersion = inBrowser && /MSIE ([0-9]+)/.exec(navigator.appVersion) && parseInt(RegExp.$1) || NaN; return { iPhone : inBrowser && !!(navigator.userAgent.match(/iPhone/i)), iPad : inBrowser && !!(navigator.userAgent.match(/iPad/i)), canvas: inBrowser && !!document.createElement('canvas').getContext, inNode : inNode, inPhantom : inPhantom, inBrowser: inBrowser, ieVersion: ieVersion, ie: !isNaN(ieVersion) }; })(); var Browser = { getPageXY: function(el) { var x = 0, y = 0; if (el.getBoundingClientRect) { var box = el.getBoundingClientRect(); x = box.left - Browser.pageXToViewportX(0); y = box.top - Browser.pageYToViewportY(0); } else { var fixed = Browser.elementIsFixed(el); while (el) { x += el.offsetLeft || 0; y += el.offsetTop || 0; el = el.offsetParent; } if (fixed) { var offsX = -Browser.pageXToViewportX(0); var offsY = -Browser.pageYToViewportY(0); x += offsX; y += offsY; } } var obj = {x:x, y:y}; return obj; }, elementIsFixed: function(el) { // get top-level offsetParent that isn't body (cf. 
Firefox) var body = document.body; while (el && el != body) { var parent = el; el = el.offsetParent; } // Look for position:fixed in the computed style of the top offsetParent. // var styleObj = parent && (parent.currentStyle || window.getComputedStyle && window.getComputedStyle(parent, '')) || {}; var styleObj = parent && Browser.getElementStyle(parent) || {}; return styleObj['position'] == 'fixed'; }, pageXToViewportX: function(x) { return x - window.pageXOffset; }, pageYToViewportY: function(y) { return y - window.pageYOffset; }, getElementStyle: function(el) { return el.currentStyle || window.getComputedStyle && window.getComputedStyle(el, '') || {}; }, getClassNameRxp: function(cname) { return new RegExp("(^|\\s)" + cname + "(\\s|$)"); }, hasClass: function(el, cname) { var rxp = this.getClassNameRxp(cname); return el && rxp.test(el.className); }, addClass: function(el, cname) { var classes = el.className; if (!classes) { classes = cname; } else if (!this.hasClass(el, cname)) { classes = classes + ' ' + cname; } el.className = classes; }, removeClass: function(el, cname) { var rxp = this.getClassNameRxp(cname); el.className = el.className.replace(rxp, "$2"); }, replaceClass: function(el, c1, c2) { var r1 = this.getClassNameRxp(c1); el.className = el.className.replace(r1, '$1' + c2 + '$2'); }, mergeCSS: function(s1, s2) { var div = this._cssdiv; if (!div) { div = this._cssdiv = document.createElement('div'); } div.style.cssText = s1 + ";" + s2; // extra ';' for ie, which may leave off final ';' return div.style.cssText; }, addCSS: function(el, css) { el.style.cssText = Browser.mergeCSS(el.style.cssText, css); }, // Return: HTML node reference or null // Receive: node reference or id or "#" + id getElement: function(ref) { var el; if (typeof ref == 'string') { if (ref.charAt(0) == '#') { ref = ref.substr(1); } if (ref == 'body') { el = document.getElementsByTagName('body')[0]; } else { el = document.getElementById(ref); } } else if (ref && ref.nodeType !== void 
0) { el = ref; } return el || null; }, undraggable: function(el) { el.ondragstart = function(){return false;}; el.draggable = false; } }; Browser.onload = function(handler) { if (document.readyState == 'complete') { handler(); } else { window.addEventListener('load', handler); } }; // See https://github.com/janl/mustache.js/blob/master/mustache.js utils.htmlEscape = (function() { var entityMap = { '&': '&', '<': '<', '>': '>', '"': '"', "'": ''', '/': '/' }; return function(s) { return String(s).replace(/[&<>"'\/]/g, function(s) { return entityMap[s]; }); }; }()); var classSelectorRE = /^\.([\w-]+)$/, idSelectorRE = /^#([\w-]+)$/, tagSelectorRE = /^[\w-]+$/, tagOrIdSelectorRE = /^#?[\w-]+$/; function Elements(sel) { if ((this instanceof Elements) == false) { return new Elements(sel); } this.elements = []; this.select(sel); this.tmp = new El(); } Elements.prototype = { size: function() { return this.elements.length; }, select: function(sel) { this.elements = Elements.__select(sel); return this; }, addClass: function(className) { this.forEach(function(el) { el.addClass(className); }); return this; }, removeClass: function(className) { this.forEach(function(el) { el.removeClass(className); }) return this; }, forEach: function(callback, ctx) { var tmp = this.tmp; for (var i=0, len=this.elements.length; i) id: function(id) { if (id) { this.el.id = id; return this; } return this.el.id; }, findChild: function(sel) { var node = Elements.__select(sel, this.el)[0]; if (!node) error("Unmatched selector:", sel); return new El(node); }, appendTo: function(ref) { var parent = ref instanceof El ? 
ref.el : Browser.getElement(ref); if (this._sibs) { for (var i=0, len=this._sibs.length; i= 1; if (!running) { // interrupted busy = false; return; } if (done) running = false; self.dispatchEvent('tick', { elapsed: elapsed, pct: pct, done: done, time: now, tickTime: now - tickTime }); busy = false; if (running) startTick(now); } } utils.inherit(Timer, EventDispatcher); function Tween(ease) { var self = this, timer = new Timer(), start, end; timer.on('tick', onTick); this.start = function(a, b, duration) { start = a; end = b; timer.start(duration || 500); }; function onTick(e) { var pct = ease ? ease(e.pct) : e.pct, val = end * pct + start * (1 - pct); self.dispatchEvent('change', {value: val}); } } utils.inherit(Tween, EventDispatcher); Tween.sineInOut = function(n) { return 0.5 - Math.cos(n * Math.PI) / 2; }; Tween.quadraticOut = function(n) { return 1 - Math.pow((1 - n), 2); }; // @mouse: MouseArea object function MouseWheel(mouse) { var self = this, prevWheelTime = 0, currDirection = 0, timer = new Timer().addEventListener('tick', onTick), sustainTime = 60, fadeTime = 80; if (window.onmousewheel !== undefined) { // ie, webkit window.addEventListener('mousewheel', handleWheel); } else { // firefox window.addEventListener('DOMMouseScroll', handleWheel); } function handleWheel(evt) { var direction; if (evt.wheelDelta) { direction = evt.wheelDelta > 0 ? 1 : -1; } else if (evt.detail) { direction = evt.detail > 0 ? 
-1 : 1; } if (!mouse.isOver() || !direction) return; evt.preventDefault(); prevWheelTime = +new Date(); if (!currDirection) { self.dispatchEvent('mousewheelstart'); } currDirection = direction; timer.start(sustainTime + fadeTime); } function onTick(evt) { var elapsed = evt.time - prevWheelTime, fadeElapsed = elapsed - sustainTime, scale = evt.tickTime / 25, obj; if (evt.done) { currDirection = 0; } else { if (fadeElapsed > 0) { // Decelerate if the timer fires during 'fade time' (for smoother zooming) scale *= Tween.quadraticOut((fadeTime - fadeElapsed) / fadeTime); } obj = utils.extend({direction: currDirection, multiplier: scale}, mouse.mouseData()); self.dispatchEvent('mousewheel', obj); } } } utils.inherit(MouseWheel, EventDispatcher); function MouseArea(element) { var pos = new ElementPosition(element), _areaPos = pos.position(), _self = this, _dragging = false, _isOver = false, _prevEvt, // _moveEvt, _downEvt; pos.on('change', function() {_areaPos = pos.position()}); // TODO: think about touch events document.addEventListener('mousemove', onMouseMove); document.addEventListener('mousedown', onMouseDown); document.addEventListener('mouseup', onMouseUp); element.addEventListener('mouseover', onAreaEnter); element.addEventListener('mousemove', onAreaEnter); element.addEventListener('mouseout', onAreaOut); element.addEventListener('mousedown', onAreaDown); element.addEventListener('dblclick', onAreaDblClick); function onAreaDown(e) { e.preventDefault(); // prevent text selection cursor on drag } function onAreaEnter() { if (!_isOver) { _isOver = true; _self.dispatchEvent('enter'); } } function onAreaOut(e) { _isOver = false; _self.dispatchEvent('leave'); } function onMouseUp(e) { var evt = procMouseEvent(e), elapsed, dx, dy; if (_dragging) { _dragging = false; _self.dispatchEvent('dragend', evt); } if (_downEvt) { elapsed = evt.time - _downEvt.time; dx = evt.pageX - _downEvt.pageX; dy = evt.pageY - _downEvt.pageY; if (_isOver && elapsed < 500 && Math.sqrt(dx * dx 
+ dy * dy) < 6) { _self.dispatchEvent('click', evt); } _downEvt = null; } } function onMouseDown(e) { if (e.button != 2 && e.which != 3) { // ignore right-click _downEvt = procMouseEvent(e); } } function onMouseMove(e) { var evt = procMouseEvent(e); if (!_dragging && _downEvt && _downEvt.hover) { _dragging = true; _self.dispatchEvent('dragstart', evt); } if (_dragging) { var obj = { dragX: evt.pageX - _downEvt.pageX, dragY: evt.pageY - _downEvt.pageY }; _self.dispatchEvent('drag', utils.extend(obj, evt)); } else { _self.dispatchEvent('hover', evt); } } function onAreaDblClick(e) { if (_isOver) _self.dispatchEvent('dblclick', procMouseEvent(e)); } function procMouseEvent(e) { var pageX = e.pageX, pageY = e.pageY, prev = _prevEvt; _prevEvt = { shiftKey: e.shiftKey, time: +new Date, pageX: pageX, pageY: pageY, hover: _isOver, x: pageX - _areaPos.pageX, y: pageY - _areaPos.pageY, dx: prev ? pageX - prev.pageX : 0, dy: prev ? pageY - prev.pageY : 0 }; return _prevEvt; } this.isOver = function() { return _isOver; } this.isDown = function() { return !!_downEvt; } this.mouseData = function() { return utils.extend({}, _prevEvt); } } utils.inherit(MouseArea, EventDispatcher); function ErrorMessages(model) { var el; model.addMode('alert', function() {}, turnOff); function turnOff() { if (el) { el.remove(); el = null; } } return function(str) { var infoBox; if (el) return; el = El('div').appendTo('body').addClass('error-wrapper'); infoBox = El('div').appendTo(el).addClass('error-box info-box'); El('p').addClass('error-message').appendTo(infoBox).html(str); El('div').addClass("btn dialog-btn").appendTo(infoBox).html('close').on('click', model.clearMode); model.enterMode('alert'); }; } api.enableLogging(); gui.browserIsSupported = function() { return typeof ArrayBuffer != 'undefined' && typeof Blob != 'undefined' && typeof File != 'undefined'; }; gui.formatMessageArgs = function(args) { // remove cli annotation (if present) return 
MapShaper.formatLogArgs(args).replace(/^\[[^\]]+\] ?/, ''); }; gui.handleDirectEvent = function(cb) { return function(e) { if (e.target == this) cb(); }; }; gui.getInputElement = function() { var el = document.activeElement; return (el && (el.tagName == 'INPUT' || el.contentEditable == 'true')) ? el : null; }; gui.selectElement = function(el) { var range = document.createRange(), sel = getSelection(); range.selectNodeContents(el); sel.removeAllRanges(); sel.addRange(range); }; gui.blurActiveElement = function() { var el = gui.getInputElement(); if (el) el.blur(); }; // Filter out delayed click events, e.g. so users can highlight and copy text gui.onClick = function(el, cb) { var time; el.on('mousedown', function() { time = +new Date(); }); el.on('mouseup', function(e) { if (+new Date() - time < 300) cb(e); }); }; // TODO: switch all ClickText to ClickText2 // @ref Reference to an element containing a text node function ClickText2(ref) { var self = this; var selected = false; var el = El(ref).on('mousedown', init); function init() { el.removeEventListener('mousedown', init); el.attr('contentEditable', true) .attr('spellcheck', false) .attr('autocorrect', false) .on('focus', function(e) { el.addClass('editing'); selected = false; }).on('blur', function(e) { el.removeClass('editing'); self.dispatchEvent('change'); getSelection().removeAllRanges(); }).on('keydown', function(e) { if (e.keyCode == 13) { // enter e.stopPropagation(); e.preventDefault(); this.blur(); } }).on('click', function(e) { if (!selected && getSelection().isCollapsed) { gui.selectElement(el.node()); } selected = true; e.stopPropagation(); }); } this.value = function(str) { if (utils.isString(str)) { el.node().textContent = str; } else { return el.node().textContent; } }; } utils.inherit(ClickText2, EventDispatcher); // @ref reference to a text input element function ClickText(ref) { var _el = El(ref); var _self = this; var _max = Infinity, _min = -Infinity, _formatter = function(v) {return 
String(v);}, _validator = function(v) {return !isNaN(v);}, _parser = function(s) {return parseFloat(s);}, _value = 0; _el.on('blur', onblur); _el.on('keydown', onpress); function onpress(e) { if (e.keyCode == 27) { // esc _self.value(_value); // reset input field to current value _el.el.blur(); } else if (e.keyCode == 13) { // enter _el.el.blur(); } } // Validate input contents. // Update internal value and fire 'change' if valid // function onblur() { var val = _parser(_el.el.value); if (val === _value) { // return; } if (_validator(val)) { _self.value(val); _self.dispatchEvent('change', {value:_self.value()}); } else { _self.value(_value); _self.dispatchEvent('error'); // TODO: improve } } this.bounds = function(min, max) { _min = min; _max = max; return this; }; this.validator = function(f) { _validator = f; return this; }; this.formatter = function(f) { _formatter = f; return this; }; this.parser = function(f) { _parser = f; return this; }; this.value = function(arg) { if (arg == void 0) { // var valStr = this.el.value; // return _parser ? 
_parser(valStr) : parseFloat(valStr); return _value; } var val = utils.clamp(arg, _min, _max); if (!_validator(val)) { error("ClickText#value() invalid value:", arg); } else { _value = val; } _el.el.value = _formatter(val); return this; }; } utils.inherit(ClickText, EventDispatcher); function Checkbox(ref) { var _el = El(ref); } utils.inherit(Checkbox, EventDispatcher); function SimpleButton(ref) { var _el = El(ref), _self = this, _active = !_el.hasClass('disabled'); _el.on('click', function(e) { if (_active) _self.dispatchEvent('click'); return false; }); this.active = function(a) { if (a === void 0) return _active; if (a !== _active) { _active = a; _el.toggleClass('disabled'); } return this; }; } utils.inherit(SimpleButton, EventDispatcher); function ModeButton(el, name, model) { var btn = El(el), active = false; model.on('mode', function(e) { active = e.name == name; if (active) { btn.addClass('active'); } else { btn.removeClass('active'); } }); btn.on('click', function() { model.enterMode(active ? null : name); }); } function draggable(ref) { var xdown, ydown; var el = El(ref), dragging = false, obj = new EventDispatcher(); Browser.undraggable(el.node()); el.on('mousedown', function(e) { xdown = e.pageX; ydown = e.pageY; window.addEventListener('mousemove', onmove); window.addEventListener('mouseup', onrelease); }); function onrelease(e) { window.removeEventListener('mousemove', onmove); window.removeEventListener('mouseup', onrelease); if (dragging) { dragging = false; obj.dispatchEvent('dragend'); } } function onmove(e) { if (!dragging) { dragging = true; obj.dispatchEvent('dragstart'); } obj.dispatchEvent('drag', {dx: e.pageX - xdown, dy: e.pageY - ydown}); } return obj; } function Slider(ref, opts) { var _el = El(ref); var _self = this; var defaults = { space: 7 }; opts = utils.extend(defaults, opts); var _pct = 0; var _track, _handle, _handleLeft = opts.space; function size() { return _track ? 
_track.width() - opts.space * 2 : 0; } this.track = function(ref) { if (ref && !_track) { _track = El(ref); _handleLeft = _track.el.offsetLeft + opts.space; updateHandlePos(); } return _track; }; this.handle = function(ref) { var startX; if (ref && !_handle) { _handle = El(ref); draggable(_handle) .on('drag', function(e) { setHandlePos(startX + e.dx, true); }) .on('dragstart', function(e) { startX = position(); _self.dispatchEvent('start'); }) .on('dragend', function(e) { _self.dispatchEvent('end'); }); updateHandlePos(); } return _handle; }; function position() { return Math.round(_pct * size()); } this.pct = function(pct) { if (pct >= 0 && pct <= 1) { _pct = pct; updateHandlePos(); } return _pct; }; function setHandlePos(x, fire) { x = utils.clamp(x, 0, size()); var pct = x / size(); if (pct != _pct) { _pct = pct; _handle.css('left', _handleLeft + x); _self.dispatchEvent('change', {pct: _pct}); } } function updateHandlePos() { var x = _handleLeft + Math.round(position()); if (_handle) _handle.css('left', x); } } utils.inherit(Slider, EventDispatcher); var SimplifyControl = function(model) { var control = new EventDispatcher(); var _value = 1; var el = El('#simplify-control-wrapper'); var menu = El('#simplify-options'); var slider, text; new SimpleButton('#simplify-options .submit-btn').on('click', onSubmit); new SimpleButton('#simplify-options .cancel-btn').on('click', function() { if (el.visible()) { // cancel just hides menu if slider is visible menu.hide(); } else { model.clearMode(); } }); new SimpleButton('#simplify-settings-btn').on('click', function() { if (menu.visible()) { menu.hide(); } else { initMenu(); } }); new ModeButton('#simplify-btn', 'simplify', model); model.addMode('simplify', turnOn, turnOff); model.on('select', function() { if (model.getMode() == 'simplify') model.clearMode(); }); // exit simplify mode when user clicks off the visible part of the menu menu.on('click', gui.handleDirectEvent(model.clearMode)); slider = new 
Slider("#simplify-control .slider"); slider.handle("#simplify-control .handle"); slider.track("#simplify-control .track"); slider.on('change', function(e) { var pct = fromSliderPct(e.pct); text.value(pct); onchange(pct); }); slider.on('start', function(e) { control.dispatchEvent('simplify-start'); }).on('end', function(e) { control.dispatchEvent('simplify-end'); }); text = new ClickText("#simplify-control .clicktext"); text.bounds(0, 1); text.formatter(function(val) { if (isNaN(val)) return '-'; var pct = val * 100; var decimals = 0; if (pct <= 0) decimals = 1; else if (pct < 0.001) decimals = 4; else if (pct < 0.01) decimals = 3; else if (pct < 1) decimals = 2; else if (pct < 100) decimals = 1; return utils.formatNumber(pct, decimals) + "%"; }); text.parser(function(s) { return parseFloat(s) / 100; }); text.value(0); text.on('change', function(e) { var pct = e.value; slider.pct(toSliderPct(pct)); control.dispatchEvent('simplify-start'); onchange(pct); control.dispatchEvent('simplify-end'); }); function turnOn() { var target = model.getEditingLayer(); if (!MapShaper.layerHasPaths(target.layer)) { gui.alert("This layer can not be simplified"); return; } if (target.dataset.arcs.getVertexData().zz) { // TODO: try to avoid calculating pct (slow); showSlider(); // need to show slider before setting; TODO: fix control.value(target.dataset.arcs.getRetainedPct()); } else { initMenu(); } } function initMenu() { var dataset = model.getEditingLayer().dataset; var showPlanarOpt = !dataset.arcs.isPlanar(); var opts = MapShaper.getStandardSimplifyOpts(dataset, dataset.info && dataset.info.simplify); El('#planar-opt-wrapper').node().style.display = showPlanarOpt ? 
'block' : 'none'; El('#planar-opt').node().checked = !opts.spherical; El("#import-retain-opt").node().checked = opts.keep_shapes; El("#simplify-options input[value=" + opts.method + "]").node().checked = true; menu.show(); } function turnOff() { menu.hide(); control.reset(); } function onSubmit() { var dataset = model.getEditingLayer().dataset; var showMsg = dataset.arcs && dataset.arcs.getPointCount() > 1e6; var delay = 0; if (showMsg) { delay = 35; gui.showProgressMessage('Calculating'); } menu.hide(); setTimeout(function() { var opts = getSimplifyOptions(); mapshaper.simplify(dataset, opts); model.updated({ // use presimplify flag if no vertices are removed // (to trigger map redraw without recalculating intersections) presimplify: opts.pct == 1, simplify: opts.pct < 1 }); showSlider(); gui.clearProgressMessage(); }, delay); } function showSlider() { el.show(); El('body').addClass('simplify'); // for resizing, hiding layer label, etc. } function getSimplifyOptions() { var method = El('#simplify-options input[name=method]:checked').attr('value') || null; return { method: method, pct: _value, no_repair: true, keep_shapes: !!El("#import-retain-opt").node().checked, planar: !!El('#planar-opt').node().checked }; } function toSliderPct(p) { p = Math.sqrt(p); var pct = 1 - p; return pct; } function fromSliderPct(p) { var pct = 1 - p; return pct * pct; } function onchange(val) { if (_value != val) { _value = val; control.dispatchEvent('change', {value:val}); } } control.reset = function() { control.value(1); el.hide(); menu.hide(); El('body').removeClass('simplify'); }; control.value = function(val) { if (!isNaN(val)) { // TODO: validate _value = val; slider.pct(toSliderPct(val)); text.value(val); } return _value; }; control.value(_value); return control; }; // Assume zip.js is loaded and zip is defined globally zip.workerScripts = { // deflater: ['z-worker.js', 'deflate.js'], // use zip.js deflater // TODO: find out why it was necessary to rename pako_deflate.min.js 
deflater: ['z-worker.js', 'pako.deflate.js', 'codecs.js'], inflater: ['z-worker.js', 'pako.inflate.js', 'codecs.js'] }; // @file: Zip file // @cb: function(err, ) // gui.readZipFile = function(file, cb) { var _files = []; zip.createReader(new zip.BlobReader(file), importZipContent, onError); function onError(err) { cb(err); } function onDone() { cb(null, _files); } function importZipContent(reader) { var _entries; reader.getEntries(readEntries); function readEntries(entries) { _entries = entries || []; readNext(); } function readNext() { if (_entries.length > 0) { readEntry(_entries.pop()); } else { reader.close(); onDone(); } } function readEntry(entry) { var filename = entry.filename, isValid = !entry.directory && gui.isReadableFileType(filename) && !/^__MACOSX/.test(filename); // ignore "resource-force" files if (isValid) { entry.getData(new zip.BlobWriter(), function(file) { file.name = filename; // Give the Blob a name, like a File object _files.push(file); readNext(); }); } else { readNext(); } } } }; gui.showProgressMessage = function(msg) { if (!gui.progressMessage) { gui.progressMessage = El('div').id('progress-message') .appendTo('body'); } El('
').text(msg).appendTo(gui.progressMessage.empty().show()); }; gui.clearProgressMessage = function() { if (gui.progressMessage) gui.progressMessage.hide(); }; gui.parseFreeformOptions = function(raw, cmd) { var str = raw.trim(), parsed; if (!str) { return {}; } if (!/^-/.test(str)) { str = '-' + cmd + ' ' + str; } parsed = MapShaper.parseCommands(str); if (!parsed.length || parsed[0].name != cmd) { stop("Unable to parse command line options"); } return parsed[0].options; }; // tests if filename is a type that can be used gui.isReadableFileType = function(filename) { var ext = utils.getFileExtension(filename).toLowerCase(); return !!MapShaper.guessInputFileType(filename) || MapShaper.couldBeDsvFile(filename) || MapShaper.isZipFile(filename); }; // @cb function() function DropControl(cb) { var el = El('body'); el.on('dragleave', ondrag); el.on('dragover', ondrag); el.on('drop', ondrop); function ondrag(e) { // blocking drag events enables drop event e.preventDefault(); } function ondrop(e) { e.preventDefault(); cb(e.dataTransfer.files); } } // @el DOM element for select button // @cb function() function FileChooser(el, cb) { var btn = El(el).on('click', function() { input.el.click(); }); var input = El('form') .addClass('file-control').appendTo('body') .newChild('input') .attr('type', 'file') .attr('multiple', 'multiple') .on('change', onchange); function onchange(e) { var files = e.target.files; // files may be undefined (e.g. 
if user presses 'cancel' after a file has been selected) if (files) { // disable the button while files are being processed btn.addClass('selected'); input.attr('disabled', true); cb(files); btn.removeClass('selected'); input.attr('disabled', false); } } } function ImportControl(model) { new SimpleButton('#import-buttons .submit-btn').on('click', submitFiles); new SimpleButton('#import-buttons .cancel-btn').on('click', model.clearMode); var importCount = 0; var queuedFiles = []; model.addMode('import', turnOn, turnOff); new DropControl(receiveFiles); new FileChooser('#file-selection-btn', receiveFiles); new FileChooser('#import-buttons .add-btn', receiveFiles); new FileChooser('#add-file-btn', receiveFiles); model.enterMode('import'); model.on('mode', function(e) { // re-open import opts if leaving alert or console modes and nothing has been imported yet if (!e.name && importCount === 0) { model.enterMode('import'); } }); function findMatchingShp(filename) { var shpName = utils.replaceFileExtension(filename, 'shp'); return model.getDatasets().filter(function(d) { return shpName == d.info.input_files[0]; }); } function turnOn() { if (mapshaper.manifest) { downloadFiles(mapshaper.manifest); mapshaper.manifest = null; } else { El('#import-options').show(); } } function close() { El('#fork-me').hide(); El('#import-intro').hide(); // only show intro before first import El('#import-buttons').show(); El('#import-options').hide(); } function turnOff() { gui.clearProgressMessage(); clearFiles(); close(); } function clearFiles() { queuedFiles = []; El('#dropped-file-list .file-list').empty(); El('#dropped-file-list').hide(); } function addFiles(files) { var index = {}; queuedFiles = queuedFiles.concat(files).reduce(function(memo, f) { // filter out unreadable types and dupes if (gui.isReadableFileType(f.name) && f.name in index === false) { index[f.name] = true; memo.push(f); } return memo; }, []); // sort alphabetically by filename queuedFiles.sort(function(a, b) { return 
a.name > b.name ? 1 : -1; }); } function showQueuedFiles() { var list = El('#dropped-file-list .file-list').empty(); El('#dropped-file-list').show(); queuedFiles.forEach(function(f) { El('

').text(f.name).appendTo(El("#dropped-file-list .file-list")); }); } function receiveFiles(files) { var prevSize = queuedFiles.length; addFiles(utils.toArray(files)); if (queuedFiles.length === 0) return; model.enterMode('import'); if (importCount === 0 && prevSize === 0 && containsImmediateFile(queuedFiles)) { // if the first batch of files will be imported, process right away submitFiles(); } else { showQueuedFiles(); El('#import-buttons').show(); } } // Check if an array of File objects contains a file that should be imported right away function containsImmediateFile(files) { return utils.some(files, function(f) { var type = MapShaper.guessInputFileType(f.name); return type == 'shp' || type == 'json'; }); } function submitFiles() { close(); readNext(); } function readNext() { if (queuedFiles.length > 0) { readFile(queuedFiles.pop()); // read in rev. alphabetic order, so .shp comes before .dbf } else { model.clearMode(); } } function getImportOpts() { var freeform = El('#import-options .advanced-options').node().value, opts = gui.parseFreeformOptions(freeform, 'i'); opts.no_repair = !El("#repair-intersections-opt").node().checked; opts.auto_snap = !!El("#snap-points-opt").node().checked; return opts; } function loadFile(file, cb) { var reader = new FileReader(), isBinary = MapShaper.isBinaryFile(file.name); // no callback on error -- fix? reader.onload = function(e) { cb(null, reader.result); }; if (isBinary) { reader.readAsArrayBuffer(file); } else { // TODO: improve to handle encodings, etc. 
reader.readAsText(file, 'UTF-8');
  }
}

// Route a queued file to the appropriate reader.
// @file a File object
function readFile(file) {
  if (MapShaper.isZipFile(file.name)) {
    readZipFile(file);
  } else {
    loadFile(file, function(err, content) {
      if (err) {
        // NOTE(review): loadFile has no error callback wired up (see its
        // "no callback on error -- fix?" comment), so this branch only
        // advances the queue -- confirm before relying on it
        readNext();
      } else {
        readFileContent(file.name, content);
      }
    });
  }
}

// Dispatch loaded file content by detected type. Sidecar files (.dbf, .prj)
// are attached to a previously-imported matching .shp dataset rather than
// imported on their own.
function readFileContent(name, content) {
  var type = MapShaper.guessInputType(name, content),
      importOpts = getImportOpts(),
      matches = findMatchingShp(name),
      dataset, lyr;
  // TODO: refactor
  if (type == 'dbf' && matches.length > 0) {
    // find an imported .shp layer that is missing attribute data
    // (if multiple matches, try to use the most recently imported one)
    dataset = matches.reduce(function(memo, d) {
      if (!d.layers[0].data) {
        memo = d;
      }
      return memo;
    }, null);
    if (dataset) {
      lyr = dataset.layers[0];
      lyr.data = new MapShaper.ShapefileTable(content, importOpts.encoding);
      if (lyr.shapes && lyr.data.size() != lyr.shapes.length) {
        stop("Different number of records in .shp and .dbf files");
      }
      if (!lyr.geometry_type) {
        // kludge: trigger display of table cells if .shp has null geometry
        model.updated(null, lyr, dataset);
      }
      readNext();
      return;
    }
  }
  if (type == 'prj') {
    // assumes that .shp has been imported first
    matches.forEach(function(d) {
      if (!d.info.output_prj && !d.info.input_prj) {
        d.info.input_prj = content;
      }
    });
    readNext();
    return;
  }
  importFileContent(type, name, content, importOpts);
}

// Import parsed file content as a new dataset. For large inputs a progress
// message is shown first and the import is deferred briefly so the browser
// can repaint.
function importFileContent(type, path, content, importOpts) {
  var size = content.byteLength || content.length, // ArrayBuffer or string
      showMsg = size > 4e7, // don't show message if dataset is small
      delay = 0;
  importOpts.files = [path]; // TODO: try to remove this
  if (showMsg) {
    gui.showProgressMessage('Importing');
    delay = 35;
  }
  setTimeout(function() {
    var dataset = MapShaper.importFileContent(content, path, importOpts);
    dataset.info.no_repair = importOpts.no_repair;
    model.addDataset(dataset);
    importCount++;
    readNext();
  }, delay);
}

// Unpack a zip archive and queue its readable contents for import.
function readZipFile(file) {
  gui.showProgressMessage('Importing');
setTimeout(function() { gui.readZipFile(file, function(err, files) { if (err) { console.log("Zip file loading failed:"); throw err; } // don't try to import .txt files from zip files // (these would be parsed as dsv and throw errows) files = files.filter(function(f) { return !/\.txt$/i.test(f.name); }); addFiles(files); readNext(); }); }, 35); } function downloadFiles(paths, opts) { paths = paths.filter(function(f) { return gui.isReadableFileType(f); }); utils.reduceAsync(paths, [], downloadNextFile, function(err, files) { if (err || !files.length) { model.clearMode(); } else { addFiles(files); submitFiles(); } }); } function downloadNextFile(memo, filepath, next) { var req = new XMLHttpRequest(); req.responseType = 'blob'; req.addEventListener('load', function(e) { var blob = req.response; blob.name = filepath; memo.push(blob); next(null, memo); }); req.addEventListener('error', function(e) { next('error'); }); req.open('GET', '/data/' + filepath); req.send(); } } // Export buttons and their behavior var ExportControl = function(model) { var downloadSupport = typeof URL != 'undefined' && URL.createObjectURL && typeof document.createElement("a").download != "undefined" || !!window.navigator.msSaveBlob; var unsupportedMsg = "Exporting is not supported in this browser"; var menu = El('#export-options').on('click', gui.handleDirectEvent(model.clearMode)); var datasets = []; // array of exportable layers grouped by dataset var anchor, blobUrl; new SimpleButton('#export-options .cancel-btn').on('click', model.clearMode); if (!downloadSupport) { El('#export-btn').on('click', function() { gui.alert(unsupportedMsg); }); MapShaper.writeFiles = function() { error(unsupportedMsg); }; } else { anchor = menu.newChild('a').attr('href', '#').node(); initExportButton(); model.addMode('export', turnOn, turnOff); new ModeButton('#export-btn', 'export', model); MapShaper.writeFiles = function(files, opts, done) { if (!utils.isArray(files) || files.length === 0) { done("Nothing to 
export"); }else if (opts.isEchartsType) { var content=""; var fileFormat=".json"; var filename=files[0].filename.replace('.json',''); if(opts.isEchartsType==='json'){ fileFormat=".json"; content=Encoder.convert2Echarts(files[0].content, filename,'json'); }else{ fileFormat=".js"; content=Encoder.convert2Echarts(files[0].content, filename,'js'); } saveBlob(filename+fileFormat, new Blob([content]), done); }else if (files.length == 1) { saveBlob(files[0].filename, new Blob([files[0].content]), done); } else { filename = utils.getCommonFileBase(utils.pluck(files, 'filename')) || "output"; saveZipFile(filename + ".zip", files, done); } }; } function initLayerMenu() { // init layer menu with current editing layer selected var list = El('#export-layer-list').empty(); var template = ''; var datasets = model.getDatasets().map(initDataset); var hideLayers = datasets.length == 1 && datasets[0].layers.length < 2; El('#export-layers').css('display', hideLayers ? 'none' : 'block'); return datasets; function initDataset(dataset) { var layers = dataset.layers.map(function(lyr) { var html = utils.format(template, lyr.name || '[unnamed layer]'); var box = El('div').html(html).appendTo(list).findChild('input').node(); return { checkbox: box, layer: lyr }; }); return { dataset: dataset, layers: layers }; } } function getInputFormats() { return model.getDatasets().reduce(function(memo, d) { var fmt = d.info && d.info.input_format; if (fmt) memo.push(fmt); return memo; }, []); } function getDefaultExportFormat() { var dataset = model.getEditingLayer().dataset; return dataset.info && dataset.info.input_format || 'geojson'; } function initFormatMenu() { var defaults = ['shapefile', 'geojson', 'topojson', 'dsv','svg','echartsmapjson','echartsmapjs']; var formats = utils.uniq(defaults.concat(getInputFormats())); var items = formats.map(function(fmt) { return utils.format('

', fmt, MapShaper.getFormatName(fmt)); }); El('#export-formats').html(items.join('\n')); El('#export-formats input[value="' + getDefaultExportFormat() + '"]').node().checked = true; } function turnOn() { datasets = initLayerMenu(); initFormatMenu(); menu.show(); } function turnOff() { menu.hide(); } function getSelectedFormat() { return El('#export-formats input:checked').node().value; } function getSelectedLayers() { var selections = datasets.reduce(function(memo, obj) { var dataset = obj.dataset; var selection = obj.layers.reduce(reduceLayer, []); if (selection.length > 0) { memo.push(utils.defaults({layers: selection}, dataset)); } return memo; }, []); function reduceLayer(memo, obj) { if (obj.checkbox.checked) { // shallow-copy layer, so uniqified filenames do not affect original layers memo.push(utils.extend({}, obj.layer)); } return memo; } return selections; } function initExportButton() { new SimpleButton('#save-btn').on('click', function() { gui.showProgressMessage('Exporting'); model.clearMode(); setTimeout(function() { exportMenuSelection(function(err) { if (err) { if (utils.isString(err)) { gui.alert(err); } else { // stack seems to change if Error is logged directly console.error(err.stack); gui.alert("Export failed for an unknown reason"); } } // hide message after a delay, so it doesn't just flash for an instant. setTimeout(gui.clearProgressMessage, err ? 
0 : 400); }); }, 20); }); } // @done function(string|Error|null) function exportMenuSelection(done) { var opts, files, datasets; try { opts = gui.parseFreeformOptions(El('#export-options .advanced-options').node().value, 'o'); if (!opts.format) opts.format = getSelectedFormat(); if(opts.format==='echartsmapjson'){ opts.format='geojson'; opts.isEchartsType='json'; }else if(opts.format==='echartsmapjs'){ opts.format='geojson'; opts.isEchartsType='js'; } // ignoring command line "target" option datasets = getSelectedLayers(); if (isMultiLayerFormat(opts.format)) { // merge multiple datasets into one for export as SVG or TopoJSON if (datasets.length > 1) { datasets = [MapShaper.mergeDatasetsForExport(datasets)]; if (opts.format == 'topojson') { // Build topology, in case user has loaded several // files derived from the same source, with matching coordinates // (Downsides: useless work if geometry is unrelated; // could create many small arcs if layers are partially related) api.buildTopology(datasets[0]); } // KLUDGE let exporter know that cloning is not needed // (because shape data was deep-copied during merge) opts.cloned = true; } } else { MapShaper.assignUniqueLayerNames2(datasets); } files = datasets.reduce(function(memo, dataset) { var output = MapShaper.exportFileContent(dataset, opts); return memo.concat(output); }, []); // multiple output files will be zipped, need unique names MapShaper.assignUniqueFileNames(files); } catch(e) { return done(e); } MapShaper.writeFiles(files, opts, done); } function isMultiLayerFormat(fmt) { return fmt == 'svg' || fmt == 'topojson'; } function saveBlob(filename, blob, done) { if (window.navigator.msSaveBlob) { window.navigator.msSaveBlob(blob, filename); done(); } try { // revoke previous download url, if any. TODO: do this when download completes (how?) if (blobUrl) URL.revokeObjectURL(blobUrl); blobUrl = URL.createObjectURL(blob); } catch(e) { done("Mapshaper can't export files from this browser. 
Try switching to Chrome or Firefox."); return; } // TODO: handle errors anchor.href = blobUrl; anchor.download = filename; var clickEvent = document.createEvent("MouseEvent"); clickEvent.initMouseEvent("click", true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null); anchor.dispatchEvent(clickEvent); done(); } function saveZipFile(zipfileName, files, done) { var toAdd = files; try { zip.createWriter(new zip.BlobWriter("application/zip"), addFile, zipError); } catch(e) { // TODO: show proper error message, not alert done("This browser doesn't support Zip file creation."); } function zipError(msg) { var str = "Error creating Zip file"; if (msg) { str += ": " + (msg.message || msg); } done(str); } function addFile(archive) { if (toAdd.length === 0) { archive.close(function(blob) { saveBlob(zipfileName, blob, done); }); } else { var obj = toAdd.pop(), blob = new Blob([obj.content]); archive.add(obj.filename, new zip.BlobReader(blob), function() {addFile(archive);}); } } } }; function RepairControl(model, map) { var el = El("#intersection-display"), readout = el.findChild("#intersection-count"), btn = el.findChild("#repair-btn"), _self = this, _dataset, _currXX; model.on('update', function(e) { if (e.flags.simplify || e.flags.proj || e.flags.arc_count) { // these changes require nulling out any cached intersection data and recalculating if (_dataset) { _dataset.info.intersections = null; _dataset = null; _self.hide(); } delayedUpdate(); } else if (e.flags.select) { _self.hide(); if (!e.flags.import) { // Don't recalculate if a dataset was just imported -- another layer may be // selected right away. 
reset(); delayedUpdate(); } } }); model.on('mode', function(e) { if (e.prev == 'import') { // update if import just finished and a new dataset is being edited delayedUpdate(); } }); btn.on('click', function() { var fixed = MapShaper.repairIntersections(_dataset.arcs, _currXX); showIntersections(fixed); btn.addClass('disabled'); model.updated({repair: true}); }); this.hide = function() { el.hide(); map.setHighlightLayer(null); }; // Detect and display intersections for current level of arc simplification this.update = function() { var XX, showBtn, pct; if (!_dataset) return; if (_dataset.arcs.getRetainedInterval() > 0) { // TODO: cache these intersections XX = MapShaper.findSegmentIntersections(_dataset.arcs); showBtn = XX.length > 0; } else { // no simplification XX = _dataset.info.intersections; if (!XX) { // cache intersections at 0 simplification, to avoid recalculating // every time the simplification slider is set to 100% or the layer is selected at 100% XX = _dataset.info.intersections = MapShaper.findSegmentIntersections(_dataset.arcs); } showBtn = false; } el.show(); showIntersections(XX); btn.classed('disabled', !showBtn); }; function delayedUpdate() { setTimeout(function() { var e = model.getEditingLayer(); if (e.dataset && e.dataset != _dataset && !e.dataset.info.no_repair && MapShaper.layerHasPaths(e.layer)) { _dataset = e.dataset; _self.update(); } }, 10); } function reset() { _dataset = null; _currXX = null; _self.hide(); } function showIntersections(XX) { var n = XX.length, pointLyr; _currXX = XX; if (n > 0) { pointLyr = {geometry_type: 'point', shapes: [MapShaper.getIntersectionPoints(XX)]}; map.setHighlightLayer(pointLyr, {layers:[pointLyr]}); readout.text(utils.format("%s line intersection%s", n, utils.pluralSuffix(n))); } else { map.setHighlightLayer(null); readout.text(''); } } } utils.inherit(RepairControl, EventDispatcher); function LayerControl(model) { var el = El("#layer-control").on('click', gui.handleDirectEvent(model.clearMode)); var 
buttonLabel = El('#layer-control-btn .layer-name'); var isOpen = false; new ModeButton('#layer-control-btn .header-btn', 'layer_menu', model); model.addMode('layer_menu', turnOn, turnOff); model.on('update', function(e) { updateBtn(); if (isOpen) render(); }); function turnOn() { isOpen = true; render(); el.show(); } function turnOff() { isOpen = false; el.hide(); } function updateBtn() { var name = model.getEditingLayer().layer.name || "[unnamed layer]"; buttonLabel.html(name + "  ▼"); } function render() { var list = El('#layer-control .layer-list'); if (isOpen) { list.hide().empty(); model.forEachLayer(function(lyr, dataset) { list.appendChild(renderLayer(lyr, dataset)); }); list.show(); } } function describeLyr(lyr) { var n = MapShaper.getFeatureCount(lyr), str, type; if (lyr.data && !lyr.shapes) { type = 'data record'; } else if (lyr.geometry_type) { type = lyr.geometry_type + ' feature'; } if (type) { str = utils.format('%,d %s%s', n, type, utils.pluralSuffix(n)); } else { str = "[empty]"; } return str; } function describeSrc(lyr, dataset) { var file = dataset.info.input_files[0] || ''; if (utils.endsWith(file, '.shp') && !lyr.data && lyr == dataset.layers[0]) { file += " (missing .dbf)"; } return file; } function getDisplayName(name) { return name || '[unnamed]'; } function renderLayer(lyr, dataset) { var editLyr = model.getEditingLayer().layer; var entry = El('div').addClass('layer-item').classed('active', lyr == editLyr); var html = rowHTML('name', '' + getDisplayName(lyr.name) + ''); html += rowHTML('source file', describeSrc(lyr, dataset)); html += rowHTML('contents', describeLyr(lyr)); html += ''; entry.html(html); // init delete button entry.findChild('img').on('mouseup', function(e) { e.stopPropagation(); deleteLayer(lyr, dataset); }); // init name editor new ClickText2(entry.findChild('.layer-name')) .on('change', function(e) { var str = cleanLayerName(this.value()); this.value(getDisplayName(str)); lyr.name = str; updateBtn(); }); // init 
click-to-select gui.onClick(entry, function() { if (!gui.getInputElement()) { // don't select if user is typing model.clearMode(); if (lyr != editLyr) { model.updated({select: true}, lyr, dataset); } } }); return entry; } function deleteLayer(lyr, dataset) { var otherLyr = model.findAnotherLayer(lyr); if (otherLyr) { turnOff(); // avoid rendering twice if (model.getEditingLayer().layer == lyr) { // switch to a different layer if deleted layer was selected model.selectLayer(otherLyr.layer, otherLyr.dataset); } model.deleteLayer(lyr, dataset); turnOn(); } else { // refresh browser if deleted layer was the last layer window.location.href = window.location.href.toString(); } } function cleanLayerName(raw) { return raw.replace(/[\n\t/\\]/g, '') .replace(/^[\.\s]+/, '').replace(/[\.\s]+$/, ''); } function rowHTML(c1, c2) { return utils.format('
%s
' + '
%s
', c1, c2); } } // These functions could be called when validating i/o options; TODO: avoid this cli.isFile = cli.isDirectory = function(name) {return false;}; cli.validateOutputDir = function() {}; // Replaces functions for reading from files with functions that try to match // already-loaded datasets. // function ImportFileProxy(model) { // Try to match an imported dataset or layer. // TODO: think about handling import options function find(src) { var datasets = model.getDatasets(); var retn = datasets.reduce(function(memo, d) { var lyr; if (memo) return memo; // already found a match // try to match import filename of this dataset if (d.info.input_files[0] == src) return d; // try to match name of a layer in this dataset lyr = utils.find(d.layers, function(lyr) {return lyr.name == src;}); return lyr ? MapShaper.isolateLayer(lyr, d) : null; }, null); if (!retn) stop("Missing data layer [" + src + "]"); return retn; } api.importFile = function(src, opts) { var dataset = find(src); // Aeturn a copy with layers duplicated, so changes won't affect original layers // This makes an (unsafe) assumption that the dataset arcs won't be changed... // need to rethink this. return utils.defaults({ layers: dataset.layers.map(MapShaper.copyLayer) }, dataset); }; api.importDataTable = function(src, opts) { var dataset = find(src); return dataset.layers[0].data; }; } gui.getPixelRatio = function() { var deviceRatio = window.devicePixelRatio || window.webkitDevicePixelRatio || 1; return deviceRatio > 1 ? 
2 : 1; }; function DisplayCanvas() { var _self = El('canvas'), _canvas = _self.node(), _ctx = _canvas.getContext('2d'), _ext; _self.prep = function(extent) { var w = extent.width(), h = extent.height(), pixRatio = gui.getPixelRatio(); _ctx.clearRect(0, 0, _canvas.width, _canvas.height); _canvas.width = w * pixRatio; _canvas.height = h * pixRatio; _self.classed('retina', pixRatio == 2); _self.show(); _ext = extent; }; _self.drawPathShapes = function(shapes, arcs, style) { var start = getPathStart(style, _ext), draw = getShapePencil(arcs, _ext), end = getPathEnd(style); for (var i=0, n=shapes.length; i 0 === false) continue; for (var j=0, m=shp ? shp.length : 0; j 0) { ctx.moveTo(x + radius, y); ctx.arc(x, y, radius, 0, Math.PI * 2, true); } } function drawSquare(x, y, size, ctx) { if (size > 0) { var offs = size / 2; x = Math.round(x - offs); y = Math.round(y - offs); ctx.fillRect(x, y, size, size); } } function drawPath(vec, t, ctx) { var minLen = gui.getPixelRatio() > 1 ? 1 : 0.6, x, y, xp, yp; if (!vec.hasNext()) return; x = xp = vec.x * t.mx + t.bx; y = yp = vec.y * t.my + t.by; ctx.moveTo(x, y); while (vec.hasNext()) { x = vec.x * t.mx + t.bx; y = vec.y * t.my + t.by; if (Math.abs(x - xp) > minLen || Math.abs(y - yp) > minLen) { ctx.lineTo(x, y); xp = x; yp = y; } } if (x != xp || y != yp) { ctx.lineTo(x, y); } } function getShapePencil(arcs, ext) { var t = getScaledTransform(ext); return function(shp, ctx) { var iter = new MapShaper.ShapeIter(arcs); if (!shp) return; for (var i=0; i 60) { lineScale *= Math.pow(mapScale - 59, 0.18); } return function(ctx, i) { var strokeWidth; ctx.beginPath(); if (styler) { styler(style, i); } if (style.opacity >= 0) { ctx.globalAlpha = style.opacity; } if (style.strokeWidth > 0) { strokeWidth = style.strokeWidth; if (pixRatio > 1) { // bump up thin lines on retina, but not to more than 1px (too slow) strokeWidth = strokeWidth < 1 ? 
1 : strokeWidth * pixRatio; } ctx.lineCap = 'round'; ctx.lineJoin = 'round'; ctx.lineWidth = strokeWidth * lineScale; ctx.strokeStyle = style.strokeColor; } if (style.fillColor) { ctx.fillStyle = style.fillColor; } }; } function getPathEnd(style) { return function(ctx) { if (style.fillColor) ctx.fill(); if (style.strokeWidth > 0) ctx.stroke(); if (style.opacity >= 0) ctx.globalAlpha = 1; ctx.closePath(); }; } // A wrapper for ArcCollection that filters paths to speed up rendering. // function FilteredArcCollection(unfilteredArcs) { var sortedThresholds, filteredArcs, filteredSegLen; init(); function init() { var size = unfilteredArcs.getPointCount(), cutoff = 5e5, nth; sortedThresholds = filteredArcs = null; if (!!unfilteredArcs.getVertexData().zz) { // If we have simplification data... // Sort simplification thresholds for all non-endpoint vertices // for quick conversion of simplification percentage to threshold value. // For large datasets, use every nth point, for faster sorting. nth = Math.ceil(size / cutoff); sortedThresholds = unfilteredArcs.getRemovableThresholds(nth); utils.quicksort(sortedThresholds, false); // For large datasets, create a filtered copy of the data for faster rendering if (size > cutoff) { filteredArcs = initFilteredArcs(unfilteredArcs, sortedThresholds); filteredSegLen = MapShaper.getAvgSegment(filteredArcs); } } else { if (size > cutoff) { // generate filtered arcs when no simplification data is present filteredSegLen = MapShaper.getAvgSegment(unfilteredArcs) * 4; filteredArcs = MapShaper.simplifyArcsFast(unfilteredArcs, filteredSegLen); } } } // Use simplification data to create a low-detail copy of arcs, for faster // rendering when zoomed-out. 
function initFilteredArcs(arcs, sortedThresholds) { var filterPct = 0.08; var currInterval = arcs.getRetainedInterval(); var filterZ = sortedThresholds[Math.floor(filterPct * sortedThresholds.length)]; var filteredArcs = arcs.setRetainedInterval(filterZ).getFilteredCopy(); arcs.setRetainedInterval(currInterval); // reset current simplification return filteredArcs; } this.getArcCollection = function(ext) { refreshFilteredArcs(); // Use a filtered version of arcs at small scales var unitsPerPixel = 1/ext.getTransform().mx, useFiltering = filteredArcs && unitsPerPixel > filteredSegLen * 1.5; return useFiltering ? filteredArcs : unfilteredArcs; }; function refreshFilteredArcs() { if (filteredArcs) { if (filteredArcs.size() != unfilteredArcs.size()) { init(); } filteredArcs.setRetainedInterval(unfilteredArcs.getRetainedInterval()); } } this.size = function() {return unfilteredArcs.size();}; this.setRetainedPct = function(pct) { if (sortedThresholds) { var z = sortedThresholds[Math.floor(pct * sortedThresholds.length)]; z = MapShaper.clampIntervalByPct(z, pct); // this.setRetainedInterval(z); unfilteredArcs.setRetainedInterval(z); } else { unfilteredArcs.setRetainedPct(pct); } }; } gui.getDisplayLayerForTable = function(table) { var n = table.size(), cellWidth = 12, cellHeight = 5, gutter = 6, arcs = [], shapes = [], lyr = {shapes: shapes}, data = {layer: lyr}, aspectRatio = 1.1, usePoints = false, x, y, col, row, blockSize; if (n > 10000) { usePoints = true; gutter = 0; cellWidth = 4; cellHeight = 4; aspectRatio = 1.45; } else if (n > 5000) { cellWidth = 5; gutter = 3; aspectRatio = 1.45; } else if (n > 1000) { gutter = 3; cellWidth = 8; aspectRatio = 1.3; } if (n < 25) { blockSize = n; } else { blockSize = Math.sqrt(n * (cellWidth + gutter) / cellHeight / aspectRatio) | 0; } for (var i=0; i 0 || arcBounds.area() === 0) { bounds = lyrBounds; } // if a point layer has no extent (e.g. 
contains only a single point), // then use arc bounds (if present), to match any path layers in the dataset. } } // If a layer has collapsed, inflate it by a default amount if (bounds.width() === 0) { bounds.xmin = (bounds.centerX() || 0) - 1; bounds.xmax = bounds.xmin + 2; } if (bounds.height() === 0) { bounds.ymin = (bounds.centerY() || 0) - 1; bounds.ymax = bounds.ymin + 2; } return bounds; } function HighlightBox(el) { var stroke = 2, box = El('div').addClass('zoom-box').appendTo(el).hide(); this.show = function(x1, y1, x2, y2) { var w = Math.abs(x1 - x2), h = Math.abs(y1 - y2); box.css({ top: Math.min(y1, y2), left: Math.min(x1, x2), width: Math.max(w - stroke * 2, 1), height: Math.max(h - stroke * 2, 1) }); box.show(); }; this.hide = function() { box.hide(); }; } gui.addSidebarButton = function(iconId) { var btn = El('div').addClass('nav-btn') .on('dblclick', function(e) {e.stopPropagation();}); // block dblclick zoom btn.appendChild(iconId); btn.appendTo('#nav-buttons'); return btn; }; function MapNav(root, ext, mouse) { var wheel = new MouseWheel(mouse), zoomBox = new HighlightBox('body'), buttons = El('div').id('nav-buttons').appendTo(root), zoomTween = new Tween(Tween.sineInOut), shiftDrag = false, zoomScale = 2.5, dragStartEvt, _fx, _fy; // zoom foci, [0,1] gui.addSidebarButton("#home-icon").on('click', function() {ext.reset();}); gui.addSidebarButton("#zoom-in-icon").on('click', zoomIn); gui.addSidebarButton("#zoom-out-icon").on('click', zoomOut); zoomTween.on('change', function(e) { ext.rescale(e.value, _fx, _fy); }); mouse.on('dblclick', function(e) { zoomByPct(zoomScale, e.x / ext.width(), e.y / ext.height()); }); mouse.on('dragstart', function(e) { shiftDrag = !!e.shiftKey; if (shiftDrag) { dragStartEvt = e; } }); mouse.on('drag', function(e) { if (shiftDrag) { zoomBox.show(e.pageX, e.pageY, dragStartEvt.pageX, dragStartEvt.pageY); } else { ext.pan(e.dx, e.dy); } }); mouse.on('dragend', function(e) { var bounds; if (shiftDrag) { shiftDrag = false; 
bounds = new Bounds(e.x, e.y, dragStartEvt.x, dragStartEvt.y); zoomBox.hide(); if (bounds.width() > 5 && bounds.height() > 5) { zoomToBox(bounds); } } }); wheel.on('mousewheel', function(e) { var k = 1 + (0.11 * e.multiplier), delta = e.direction > 0 ? k : 1 / k; ext.rescale(ext.scale() * delta, e.x / ext.width(), e.y / ext.height()); }); function zoomIn() { zoomByPct(zoomScale, 0.5, 0.5); } function zoomOut() { zoomByPct(1/zoomScale, 0.5, 0.5); } // @box Bounds with pixels from t,l corner of map area. function zoomToBox(box) { var pct = Math.max(box.width() / ext.width(), box.height() / ext.height()), fx = box.centerX() / ext.width() * (1 + pct) - pct / 2, fy = box.centerY() / ext.height() * (1 + pct) - pct / 2; zoomByPct(1 / pct, fx, fy); } // @pct Change in scale (2 = 2x zoom) // @fx, @fy zoom focus, [0, 1] function zoomByPct(pct, fx, fy) { _fx = fx; _fy = fy; zoomTween.start(ext.scale(), ext.scale() * pct, 400); } } function MapExtent(el) { var _position = new ElementPosition(el), _scale = 1, _cx, _cy, _contentBounds; _position.on('resize', function() { this.dispatchEvent('change'); this.dispatchEvent('navigate'); this.dispatchEvent('resize'); }, this); this.reset = function(force) { this.recenter(_contentBounds.centerX(), _contentBounds.centerY(), 1, force); }; this.recenter = function(cx, cy, scale, force) { if (!scale) scale = _scale; if (force || !(cx == _cx && cy == _cy && scale == _scale)) { _cx = cx; _cy = cy; _scale = scale; this.dispatchEvent('change'); this.dispatchEvent('navigate'); } }; this.pan = function(xpix, ypix) { var t = this.getTransform(); this.recenter(_cx - xpix / t.mx, _cy - ypix / t.my); }; // Zoom to @scale (a multiple of the map's full scale) // @xpct, @ypct: optional focus, [0-1]... 
this.rescale = function(scale, xpct, ypct) {
  if (arguments.length < 3) {
    // no focus point given: zoom about the center of the view
    xpct = 0.5;
    ypct = 0.5;
  }
  // fx,fy is the geographic point under the focus; shift the center so that
  // point keeps its on-screen position at the new scale
  var b = this.getBounds(),
      fx = b.xmin + xpct * b.width(),
      fy = b.ymax - ypct * b.height(),
      dx = b.centerX() - fx,
      dy = b.centerY() - fy,
      ds = _scale / scale,
      dx2 = dx * ds,
      dy2 = dy * ds,
      cx = fx + dx2,
      cy = fy + dy2;
  this.recenter(cx, cy, scale);
};

this.resize = _position.resize;
this.width = _position.width;
this.height = _position.height;
this.position = _position.position;

// get zoom factor (1 == full extent, 2 == 2x zoom, etc.)
this.scale = function() {
  return _scale;
};

this.getPixelSize = function() {
  return 1 / this.getTransform().mx;
};

// Get params for converting geographic coords to pixel coords
this.getTransform = function(pixScale) {
  // get transform (y-flipped);
  var viewBounds = new Bounds(0, 0, _position.width(), _position.height());
  if (pixScale) {
    viewBounds.xmax *= pixScale;
    viewBounds.ymax *= pixScale;
  }
  return this.getBounds().getTransform(viewBounds, true);
};

this.getBounds = function() {
  if (!_contentBounds) return new Bounds();
  return centerAlign(calcBounds(_cx, _cy, _scale));
};

// Update the extent of 'full' zoom without navigating the current view
this.setBounds = function(b) {
  var prev = _contentBounds;
  _contentBounds = b;
  if (prev) {
    // preserve the apparent zoom level relative to the new full extent
    _scale = _scale * centerAlign(b).width() / centerAlign(prev).width();
  } else {
    _cx = b.centerX();
    _cy = b.centerY();
  }
};

// Pixel padding around the content, proportional to the view dimension.
function getPadding(size) {
  return size * 0.020 + 4;
}

// Geographic bounds of a view centered on cx,cy at the given zoom factor.
function calcBounds(cx, cy, scale) {
  var w = _contentBounds.width() / scale,
      h = _contentBounds.height() / scale;
  return new Bounds(cx - w/2, cy - h/2, cx + w/2, cy + h/2);
}

// Receive: Geographic bounds of content to be centered in the map
// Return: Geographic bounds of map window centered on @_contentBounds,
// with padding applied
function centerAlign(_contentBounds) {
  var bounds = _contentBounds.clone(),
      wpix = _position.width(),
      hpix = _position.height(),
      xmarg = getPadding(wpix),
      ymarg = getPadding(hpix),
      xpad, ypad;
  wpix -= 2 * xmarg;
  hpix -= 2 * ymarg;
  if (wpix <= 0 || hpix <= 0) {
    return new Bounds(0, 0, 0, 0);
  }
  // match the view's aspect ratio, then convert the pixel margins into
  // geographic padding
  bounds.fillOut(wpix / hpix);
  xpad = bounds.width() / wpix * xmarg;
  ypad = bounds.height() / hpix * ymarg;
  bounds.padBounds(xpad, ypad, xpad, ypad);
  return bounds;
}
}
utils.inherit(MapExtent, EventDispatcher);

// Hit-testing for mouse interaction with the displayed layer; dispatches
// 'click' and 'hover' events with ids of hit shapes and updates the
// #coordinate-info readout.
function HitControl(ext, mouse) {
  var self = new EventDispatcher();
  var prevHits = [];
  var active = false;
  // hit-test implementation, selected by the target layer's geometry type
  var tests = {
    polygon: polygonTest,
    polyline: polylineTest,
    point: pointTest
  };
  var coords = El('#coordinate-info').hide();
  var lyr, target, test;

  ext.on('change', function() {
    // shapes may change along with map scale
    target = lyr ? lyr.getDisplayLayer() : null;
  });

  self.setLayer = function(o) {
    lyr = o;
    target = o.getDisplayLayer();
    test = tests[target.layer.geometry_type];
    coords.hide();
  };

  self.start = function() {
    active = true;
  };

  self.stop = function() {
    if (active) {
      hover([]);
      coords.text('').hide();
      active = false;
    }
  };

  mouse.on('click', function(e) {
    if (!active || !target) return;
    trigger('click', prevHits);
    gui.selectElement(coords.node());
  });

  // DISABLING: This causes problems when hovering over the info panel
  // Deselect hover shape when pointer leaves hover area
  //mouse.on('leave', function(e) {
  //  hover(-1);
  //});

  mouse.on('hover', function(e) {
    var p, decimals;
    if (!active || !target) return;
    p = ext.getTransform().invert().transform(e.x, e.y);
    if (target.geographic) {
      // update coordinate readout if displaying geographic shapes
      decimals = getCoordPrecision(ext.getBounds());
      coords.text(p[0].toFixed(decimals) + ', ' + p[1].toFixed(decimals)).show();
    }
    if (test && e.hover) {
      hover(test(p[0], p[1]));
    }
  });

  // Convert pixel distance to distance in coordinate units.
function getHitBuffer(pix) {
  var dist = pix / ext.getTransform().mx,
      scale = ext.scale();
  if (scale < 1) dist *= scale; // reduce hit threshold when zoomed out
  return dist;
}

// Decimals to show in the coordinate readout: fewer decimals as coordinate
// magnitudes grow (roughly seven digits of output in total).
function getCoordPrecision(bounds) {
  var min = Math.min(Math.abs(bounds.xmax), Math.abs(bounds.ymax)),
      decimals = Math.ceil(Math.log(min) / Math.LN10);
  return Math.max(0, 7 - decimals);
}

// Hit-test (x, y) against polygon shapes; returns an array of hit shape ids.
function polygonTest(x, y) {
  var dist = getHitBuffer(5),
      cands = findHitCandidates(x, y, dist),
      hits = [],
      cand, hitId;
  // NOTE(review): the next line is corrupted -- a span of source text (the
  // loop bounds and body, apparently stripped as if it were an HTML tag) is
  // missing. Restore from upstream mapshaper's gui source before use.
  for (var i=0; i 0 && hits.length === 0) {
    // secondary detection: proximity, if not inside a polygon
    hits = findNearestCandidates(x, y, dist, cands, target.dataset.arcs);
  }
  return hits;
}

// Hit-test (x, y) against polyline shapes (proximity to nearest path only).
function polylineTest(x, y) {
  var dist = getHitBuffer(15),
      cands = findHitCandidates(x, y, dist);
  return findNearestCandidates(x, y, dist, cands, target.dataset.arcs);
}

function findNearestCandidates(x, y, dist, cands, arcs) {
  var hits = [], cand, candDist;
  // NOTE(review): the next line is corrupted -- a large span is missing here
  // (this function's loop body and, apparently, the definitions of
  // pointTest/trigger/sameIds that are referenced elsewhere in HitControl).
  // Restore from upstream mapshaper's gui source before use.
  for (var i=0; i 0 ? hits[0] : -1 }); }

// Publish a new set of hover hits, but only when the id set actually changed.
function hover(hits) {
  if (!sameIds(hits, prevHits)) {
    prevHits = hits;
    El('#map-layers').classed('hover', hits.length > 0);
    trigger('hover', hits);
  }
}

// Collect shapes whose bounding boxes come within @dist of (x, y).
// Returns an array of {shape: [parts...], id: shapeId} candidate records.
function findHitCandidates(x, y, dist) {
  var arcs = target.dataset.arcs,
      index = {},
      cands = [],
      bbox = [];
  target.layer.shapes.forEach(function(shp, shpId) {
    var cand;
    for (var i = 0, n = shp && shp.length; i < n; i++) {
      arcs.getSimpleShapeBounds2(shp[i], bbox);
      if (x + dist < bbox[0] || x - dist > bbox[2] || y + dist < bbox[1] || y - dist > bbox[3]) {
        continue; // bbox non-intersection
      }
      cand = index[shpId];
      if (!cand) {
        cand = index[shpId] = {shape: [], id: shpId};
        cands.push(cand);
      }
      cand.shape.push(shp[i]);
    }
  });
  return cands;
}

return self;
}

// Attribute-record popup attached to the main map element.
function Popup() {
  var parent = El('#mshp-main-map');
  var el = El('div').addClass('popup').appendTo(parent).hide();
  // var head = El('div').addClass('popup-head').appendTo(el).text('Feature 1 of 5 next prev');
  var content = El('div').addClass('popup-content').appendTo(el);

  // Render data record @rec into the popup; @editable enables in-place edits.
  this.show = function(rec, table, editable) {
    var
maxHeight = parent.node().clientHeight - 36; this.hide(); // clean up if panel is already open render(content, rec, table, editable); el.show(); if (content.node().clientHeight > maxHeight) { content.css('height:' + maxHeight + 'px'); } }; this.hide = function() { // make sure any pending edits are made before re-rendering popup // TODO: only blur popup fields gui.blurActiveElement(); content.empty(); content.node().removeAttribute('style'); // remove inline height el.hide(); }; function render(el, rec, table, editable) { var tableEl = El('table').addClass('selectable'), rows = 0; utils.forEachProperty(rec, function(v, k) { var type = MapShaper.getFieldType(v, k, table); renderRow(tableEl, rec, k, type, editable); rows++; }); if (rows > 0) { tableEl.appendTo(el); } else { el.html('
This layer is missing attribute data.
'); } } function renderRow(table, rec, key, type, editable) { var rowHtml = '%s%s '; var val = rec[key]; var cell = El('tr') .appendTo(table) .html(utils.format(rowHtml, key, utils.htmlEscape(val))) .findChild('.value'); setFieldClass(cell, val, type); if (editable) { editItem(cell, rec, key, type); } } function setFieldClass(el, val, type) { var isNum = type ? type == 'number' : utils.isNumber(val); var isNully = val === undefined || val === null || val !== val; var isEmpty = val === ''; el.classed('num-field', isNum); el.classed('null-value', isNully); el.classed('empty', isEmpty); } function editItem(el, rec, key, type) { var input = new ClickText2(el), strval = String(rec[key]), parser = MapShaper.getInputParser(type); el.parent().addClass('editable-cell'); el.addClass('colored-text dot-underline'); input.on('change', function(e) { var val2 = parser(input.value()), strval2 = String(val2); if (strval == strval2) { // contents unchanged } else if (val2 === null) { // invalid value; revert to previous value input.value(strval); } else { // field content has changed; strval = strval2; rec[key] = val2; input.value(strval); setFieldClass(el, val2, type); } }); } } MapShaper.inputParsers = { string: function(raw) { return raw; }, number: function(raw) { var val = Number(raw); if (raw == 'NaN') { val = NaN; } else if (isNaN(val)) { val = null; } return val; }, boolean: function(raw) { var val = null; if (raw == 'true') { val = true; } else if (raw == 'false') { val = false; } return val; }, multiple: function(raw) { var val = Number(raw); return isNaN(val) ? 
raw : val; } }; MapShaper.getInputParser = function(type) { return MapShaper.inputParsers[type || 'multiple']; }; MapShaper.getValueType = function(val) { var type = null; if (utils.isString(val)) { type = 'string'; } else if (utils.isNumber(val)) { type = 'number'; } else if (utils.isBoolean(val)) { type = 'boolean'; } return type; }; MapShaper.getColumnType = function(key, table) { var records = table.getRecords(), type = null; for (var i=0, n=records.length; i 1) { if (kc == 37) { id = (_highId + n - 1) % n; } else { id = (_highId + 1) % n; } inspect(id, true); e.stopPropagation(); } } }, !!'capture'); // preempt the layer control's arrow key handler hit.on('click', function(e) { var id = e.id; var pin = false; if (_pinned && id == _highId) { // clicking on pinned shape: unpin } else if (!_pinned && id > -1) { // clicking on unpinned shape while unpinned: pin pin = true; } else if (_pinned && id > -1) { // clicking on unpinned shape while pinned: pin new shape pin = true; } else if (!_pinned && id == -1) { // clicking off the layer while pinned: unpin and deselect } inspect(id, pin, e.ids); }); hit.on('hover', function(e) { var id = e.id; if (!_inspecting || _pinned) return; inspect(id, false, e.ids); }); function showInspector(id, editable) { var o = _lyr.getDisplayLayer(); var table = o.layer.data || null; var rec = table ? 
// (continuation of the `var rec = table ? ...` expression begun above)
table.getRecordAt(id) : {};
    _popup.show(rec, table, editable);
  }

  // @id Id of a feature in the active layer, or -1
  function inspect(id, pin, ids) {
    if (!_inspecting) return;
    if (id > -1) {
      showInspector(id, pin);
    } else {
      _popup.hide();
    }
    _highId = id;
    _pinned = pin;
    // broadcast the new selection/hover state to listeners (e.g. the map)
    _self.dispatchEvent('change', {
      selection_ids: _selectionIds || [],
      hover_ids: ids || [],
      id: id,
      pinned: pin
    });
  }

  // Enter inspection mode (btn/hit are defined earlier in this constructor,
  // outside this chunk).
  function turnOn() {
    btn.addClass('selected');
    _inspecting = true;
    hit.start();
  }

  // Leave inspection mode and clear any selection from the map.
  function turnOff() {
    btn.removeClass('selected');
    hit.stop();
    _selectionIds = null;
    inspect(-1); // clear the map
    _inspecting = false;
  }

  return _self;
}

// Singleton providing the canvas display styles used by the map: the base
// layer style, hover/selection/pinned overlay styles and the highlight style.
var MapStyle = (function() {
  var darkStroke = "#334",
      lightStroke = "#b2d83a",
      pink = "#f74b80", // dark
      pink2 = "rgba(239, 0, 86, 0.16)", // "#ffd9e7", // medium
      gold = "#efc100",
      black = "black",
      selectionFill = "rgba(237, 214, 0, 0.12)",
      hoverFill = "rgba(255, 117, 165, 0.18)",
      // default style for the active layer
      outlineStyle = {
        type: 'outline',
        strokeColors: [lightStroke, darkStroke],
        strokeWidth: 0.7,
        dotColor: "#223"
      },
      highStyle = {
        dotColor: "#F24400"
      },
      // per-geometry-type overlay styles
      hoverStyles = {
        polygon: { fillColor: hoverFill, strokeColor: black, strokeWidth: 1.2 },
        point: { dotColor: black, dotSize: 8 },
        polyline: { strokeColor: black, strokeWidth: 2.5 }
      },
      selectionStyles = {
        polygon: { fillColor: selectionFill, strokeColor: gold, strokeWidth: 1 },
        point: { dotColor: gold, dotSize: 6 },
        polyline: { strokeColor: gold, strokeWidth: 1.8 }
      },
      selectionHoverStyles = {
        polygon: { fillColor: selectionFill, strokeColor: black, strokeWidth: 1.2 },
        point: { dotColor: black, dotSize: 6 },
        polyline: { strokeColor: black, strokeWidth: 2.5 }
      },
      pinnedStyles = {
        polygon: { fillColor: pink2, strokeColor: pink, strokeWidth: 1.8 },
        point: { dotColor: pink, dotSize: 8 },
        polyline: { strokeColor: pink, strokeWidth: 3 }
      };

  return {
    getHighlightStyle: function() {
      return highStyle;
    },
    // Style for drawing the active layer; SVG display styles (if present on
    // the layer) take precedence over the default outline style.
    getActiveStyle: function(lyr) {
      var style;
      if (MapShaper.layerHasSvgDisplayStyle(lyr)) {
        style = MapShaper.getSvgDisplayStyle(lyr);
      } else {
        style = utils.extend({}, outlineStyle);
        style.dotSize = calcDotSize(MapShaper.countPointsInLayer(lyr));
      }
      return style;
    },
    getOverlayStyle: getOverlayStyle
  };

  // Shrink point-dot size as the number of points in the layer grows.
  function calcDotSize(n) {
    return n < 20 && 5 || n < 500 && 4 || n < 50000 && 3 || 2;
  }

  // Build the overlay style (hover + selection + pinned feature) from the
  // inspector state object @o {id, pinned, selection_ids, hover_ids}.
  // Returns null if there is nothing to draw.
  function getOverlayStyle(lyr, o) {
    var type = lyr.geometry_type;
    var topId = o.id;
    var ids = [];
    var styles = [];
    // per-shape styler: applies the style recorded for the i-th overlay id
    var styler = function(o, i) {
      utils.extend(o, styles[i]);
    };
    var overlayStyle = {
      styler: styler
    };
    // first layer: selected feature(s)
    o.selection_ids.forEach(function(i) {
      // skip features in a higher layer
      if (i == topId || o.hover_ids.indexOf(i) > -1) return;
      ids.push(i);
      styles.push(selectionStyles[type]);
    });
    // second layer: hover feature(s)
    o.hover_ids.forEach(function(i) {
      var style;
      if (i == topId) return;
      style = o.selection_ids.indexOf(i) > -1 ? selectionHoverStyles[type] : hoverStyles[type];
      ids.push(i);
      styles.push(style);
    });
    // top layer: highlighted feature
    if (topId > -1) {
      var isPinned = o.pinned;
      var inSelection = o.selection_ids.indexOf(topId) > -1;
      var style;
      if (isPinned) {
        style = pinnedStyles[type];
      } else if (inSelection) {
        style = selectionHoverStyles[type]; // TODO: differentiate from other hover ids
      } else {
        style = hoverStyles[type]; // TODO: differentiate from other hover ids
      }
      ids.push(topId);
      styles.push(style);
    }
    if (MapShaper.layerHasSvgDisplayStyle(lyr)) {
      if (type == 'point') {
        overlayStyle = MapShaper.wrapHoverStyle(MapShaper.getSvgDisplayStyle(lyr), overlayStyle);
      }
      overlayStyle.type = 'styled';
    }
    overlayStyle.ids = ids;
    return ids.length > 0 ?
overlayStyle : null; } }()); // Modify style to use scaled circle instead of dot symbol MapShaper.wrapHoverStyle = function(style, hoverStyle) { var styler = function(obj, i) { var dotColor; style.styler(obj, i); if (hoverStyle.styler) { hoverStyle.styler(obj, i); } dotColor = obj.dotColor; if (obj.radius && dotColor) { obj.radius += 1.5; obj.fillColor = dotColor; obj.strokeColor = dotColor; obj.opacity = 1; } }; return {styler: styler}; }; MapShaper.getSvgDisplayStyle = function(lyr) { var records = lyr.data.getRecords(), fields = MapShaper.getSvgStyleFields(lyr), index = MapShaper.svgStyles; var styler = function(style, i) { var f, key, val; for (var j=0; j 500 || areaRatio < 0.05; // reset if layer is not at a viewable scale }; function MshpMap(model) { var _root = El('#mshp-main-map'), _layers = El('#map-layers'), _ext = new MapExtent(_layers), _mouse = new MouseArea(_layers.node()), _nav = new MapNav(_root, _ext, _mouse), _inspector = new InspectionControl(model, new HitControl(_ext, _mouse)); var _activeCanv = new DisplayCanvas().appendTo(_layers), // data layer shapes _overlayCanv = new DisplayCanvas().appendTo(_layers), // hover and selection shapes _annotationCanv = new DisplayCanvas().appendTo(_layers), // used for line intersections _annotationLyr, _annotationStyle, _activeLyr, _activeStyle, _overlayStyle; _ext.on('change', drawLayers); _inspector.on('change', function(e) { var lyr = _activeLyr.getDisplayLayer().layer; _overlayStyle = MapStyle.getOverlayStyle(lyr, e); drawLayer(_activeLyr, _overlayCanv, _overlayStyle); }); model.on('select', function(e) { _annotationStyle = null; _overlayStyle = null; }); model.on('update', function(e) { var prevBounds = _activeLyr ?_activeLyr.getBounds() : null, needReset = false; if (arcsMayHaveChanged(e.flags)) { // regenerate filtered arcs when simplification thresholds are calculated // or arcs are updated delete e.dataset.filteredArcs; // reset simplification after projection (thresholds have changed) // TODO: 
preserve simplification pct (need to record pct before change) if (e.flags.proj && e.dataset.arcs) { e.dataset.arcs.setRetainedPct(1); } } _activeLyr = initActiveLayer(e); needReset = gui.mapNeedsReset(_activeLyr.getBounds(), prevBounds, _ext.getBounds()); _ext.setBounds(_activeLyr.getBounds()); // update map extent to match bounds of active group if (needReset) { // zoom to full view of the active layer and redraw _ext.reset(true); } else { // refresh without navigating drawLayers(); } }); this.setHighlightLayer = function(lyr, dataset) { if (lyr) { _annotationLyr = new DisplayLayer(lyr, dataset, _ext); _annotationStyle = MapStyle.getHighlightStyle(); drawLayer(_annotationLyr, _annotationCanv, _annotationStyle); } else { _annotationStyle = null; _annotationLyr = null; } }; // lightweight way to update simplification of display lines // TODO: consider handling this as a model update this.setSimplifyPct = function(pct) { _activeLyr.setRetainedPct(pct); drawLayers(); }; function initActiveLayer(o) { var lyr = new DisplayLayer(o.layer, o.dataset, _ext); _inspector.updateLayer(lyr); _activeStyle = MapStyle.getActiveStyle(lyr.getDisplayLayer().layer); lyr.updateStyle(_activeStyle); return lyr; } // Test if an update may have affected the visible shape of arcs // @flags Flags from update event function arcsMayHaveChanged(flags) { return flags.presimplify || flags.simplify || flags.proj || flags.arc_count || flags.repair; } function drawLayers() { drawLayer(_activeLyr, _overlayCanv, _overlayStyle); drawLayer(_activeLyr, _activeCanv, _activeStyle); drawLayer(_annotationLyr, _annotationCanv, _annotationStyle); } function drawLayer(lyr, canv, style) { if (style) { canv.prep(_ext); lyr.draw(canv, style); } else { canv.hide(); } } } utils.inherit(MshpMap, EventDispatcher); function Console(model) { var CURSOR = '$ '; var PROMPT = 'Enter mapshaper commands or type "tips" for examples and console help'; var el = El('#console').hide(); var content = El('#console-buffer'); var log 
// (continuation of the `var log ` declaration begun above)
= El('div').id('console-log').appendTo(content);
var line = El('div').id('command-line').appendTo(content).text(CURSOR);
// contentEditable span used as the command input field
var input = El('span').appendTo(line)
  .addClass('input-field')
  .attr('spellcheck', false)
  .attr('autocorrect', false)
  .attr('contentEditable', true)
  .on('focus', receiveFocus)
  .on('paste', onPaste);
var history = [];      // previously submitted commands (most recent last)
var historyId = 0;     // how far back in @history the user has navigated
var _isOpen = false;
var _error = MapShaper.error; // save default error functions...
var _stop = MapShaper.stop;

// capture all messages to this console, whether open or closed
message = MapShaper.message = consoleMessage;
message(PROMPT);
document.addEventListener('keydown', onKeyDown);
new ModeButton('#console-btn', 'console', model);
model.addMode('console', turnOn, turnOff);

gui.onClick(content, function(e) {
  var targ = El(e.target);
  if (gui.getInputElement() || targ.hasClass('console-message')) {
    // don't focus if user is typing or user clicks content area
  } else {
    input.node().focus();
  }
});

// Append @str to the console log, optionally with CSS class @cname.
function toLog(str, cname) {
  var msg = El('div').text(str).appendTo(log);
  if (cname) {
    msg.addClass(cname);
  }
  scrollDown();
}

// Open the console (only when a layer is being edited); reroutes
// MapShaper.stop/error to the console's handlers while open.
function turnOn() {
  if (!_isOpen && !!model.getEditingLayer()) {
    _isOpen = true;
    stop = MapShaper.stop = consoleStop;
    error = MapShaper.error = consoleError;
    el.show();
    input.node().focus();
  }
}

// Close the console and restore the default stop/error handlers.
function turnOff() {
  if (_isOpen) {
    _isOpen = false;
    stop = MapShaper.stop = _stop; // restore original error functions
    error = MapShaper.error = _error;
    el.hide();
    input.node().blur();
  }
}

function onPaste(e) {
  // paste plain text (remove any copied HTML tags)
  e.preventDefault();
  var str = (e.originalEvent || e).clipboardData.getData('text/plain');
  document.execCommand("insertHTML", false, str);
}

function receiveFocus() {
  placeCursor();
}

// Move the text caret to the end of the command-line input.
function placeCursor() {
  var el = input.node();
  var range, selection;
  if (readCommandLine().length > 0) {
    // move cursor to end of text
    range = document.createRange();
    range.selectNodeContents(el);
    range.collapse(false); //collapse the range to the end point.
selection = window.getSelection(); selection.removeAllRanges(); selection.addRange(range); } } function scrollDown() { var el = content.parent().node(); el.scrollTop = el.scrollHeight; } function metaKey(e) { return e.metaKey || e.ctrlKey || e.altKey; } function onKeyDown(e) { var kc = e.keyCode, inputEl = gui.getInputElement(), typing = !!inputEl, typingInConsole = inputEl && inputEl == input.node(), inputText = readCommandLine(), capture = false; // esc key if (kc == 27) { if (typing) { inputEl.blur(); } model.clearMode(); // esc escapes other modes as well capture = true; // l/r arrow keys while not typing in a text field } else if ((kc == 37 || kc == 39) && (!typing || typingInConsole && !inputText)) { if (kc == 37) { model.selectPrevLayer(); } else { model.selectNextLayer(); } // delete key while not inputting text } else if (kc == 8 && !typing) { capture = true; // prevent delete from leaving page // any key while console is open } else if (_isOpen) { capture = true; if (kc == 13) { // enter submit(); } else if (kc == 9) { // tab tabComplete(); } else if (kc == 38) { back(); } else if (kc == 40) { forward(); } else if (kc == 32 && (!typing || (inputText === '' && typingInConsole))) { // space bar closes if nothing has been typed model.clearMode(); } else if (!typing && e.target != input.node() && !metaKey(e)) { // typing returns focus, unless a meta key is down (to allow Cmd-C copy) // or user is typing in a different input area somewhere input.node().focus(); capture = false; } else { // normal typing capture = false; } // space bar while not inputting text } else if (!typing && kc == 32) { // space bar opens console, unless typing in an input field or editable el capture = true; model.enterMode('console'); } if (capture) { e.preventDefault(); } } // tab-completion for field names function tabComplete() { var line = readCommandLine(), match = /\w+$/.exec(line), stub = match ? 
match[0] : '', lyr = model.getEditingLayer().layer, names, name; if (stub && lyr.data) { names = findCompletions(stub, lyr.data.getFields()); if (names.length > 0) { name = utils.getCommonFileBase(names); if (name.length > stub.length) { toCommandLine(line.substring(0, match.index) + name); } } } } function findCompletions(str, fields) { return fields.filter(function(name) { return name.indexOf(str) === 0; }); } function readCommandLine() { return input.node().textContent.trim(); } function toCommandLine(str) { input.node().textContent = str.trim(); placeCursor(); } function peekHistory(i) { var idx = history.length - 1 - (i || 0); return idx >= 0 ? history[idx] : null; } function toHistory(str) { if (historyId > 0) { // if we're back in the history stack if (peekHistory() === '') { // remove empty string (which may have been appended when user started going back) history.pop(); } historyId = 0; // move back to the top of the stack } if (str && str != peekHistory()) { history.push(str); } } function fromHistory() { toCommandLine(peekHistory(historyId)); } function back() { if (history.length === 0) return; if (historyId === 0) { history.push(readCommandLine()); } historyId = Math.min(history.length - 1, historyId + 1); fromHistory(); } function forward() { if (historyId <= 0) return; historyId--; fromHistory(); if (historyId === 0) { history.pop(); } } function clear() { log.empty(); scrollDown(); } function getCommandFlags(commands) { return commands.reduce(function(memo, cmd) { memo[cmd.name] = true; return memo; }, {}); } function submit() { var cmd = readCommandLine(); toCommandLine(''); toLog(CURSOR + cmd); if (cmd) { if (cmd == 'clear') { clear(); } else if (cmd == 'tips') { printExamples(); } else if (cmd == 'layers') { message("Available layers:", MapShaper.getFormattedLayerList(model.getEditingLayer().dataset.layers)); } else if (cmd == 'close' || cmd == 'exit' || cmd == 'quit') { model.clearMode(); } else if (cmd) { runMapshaperCommands(cmd); } 
toHistory(cmd); } } function runMapshaperCommands(str) { var commands, target; try { commands = MapShaper.parseConsoleCommands(str); commands = MapShaper.runAndRemoveInfoCommands(commands); target = model.getEditingLayer(); } catch (e) { return onError(e); } if (target.layer && commands.length > 0) { applyParsedCommands(commands, target.layer, target.dataset); } } function applyParsedCommands(commands, lyr, dataset) { var lyrId = dataset.layers.indexOf(lyr), prevArcCount = dataset.arcs ? dataset.arcs.size() : 0; // most commands should target the currently edited layer unless // user has specified a different target commands.forEach(function(cmd) { if (!cmd.options.target && cmd.name != 'rename-layers' && cmd.name != 'merge-layers') { cmd.options.target = String(lyrId); } }); MapShaper.runParsedCommands(commands, dataset, function(err) { var flags = getCommandFlags(commands), outputLyr = getOutputLayer(lyrId, dataset, commands); if (prevArcCount > 0 && dataset.arcs.size() != prevArcCount) { // kludge to signal map that filtered arcs need refreshing flags.arc_count = true; } model.updated(flags, outputLyr, dataset); // signal the map to update even if an error has occured, because the // commands may have partially succeeded and changes may have occured to // the data. if (err) onError(err); }); } // try to get the output layer from the last console command // (if multiple layers are output, pick one of the output layers) // @lyrId index of the currently edited layer function getOutputLayer(lyrId, dataset, commands) { var lastCmd = commands[commands.length-1], layers = dataset.layers, lyr; if (lastCmd.options.no_replace) { // pick last layer if a new layer has been created // (new layers should be appended to the list of layers -- need to test) lyr = layers[layers.length-1]; } else { // use the layer in the same position as the currently selected layer; // this may not be the output layer if a different layer was explicitly // targeted. 
// (continuation of getOutputLayer(): fall back to the layer in the same
// position as the currently selected layer)
lyr = layers[lyrId] || layers[0];
    }
    return lyr;
  }

  // Route an error from a console command to the console log.
  function onError(err) {
    if (utils.isString(err)) {
      stop(err);
    } else if (err.name == 'APIError') {
      // stop() has already been called, don't need to log
    } else if (err.name) {
      // log stack trace to browser console
      console.error(err.stack);
      // log to console window
      warning(err.message);
    }
  }

  // Replacement for MapShaper.stop while the console is open: log the
  // message and abort via APIError.
  function consoleStop() {
    var msg = gui.formatMessageArgs(arguments);
    warning(msg);
    throw new APIError(msg);
  }

  function warning() {
    var msg = gui.formatMessageArgs(arguments);
    toLog(msg, 'console-error');
  }

  function consoleMessage() {
    var msg = gui.formatMessageArgs(arguments);
    toLog(msg, 'console-message');
  }

  // Replacement for MapShaper.error while the console is open.
  function consoleError() {
    var msg = gui.formatMessageArgs(arguments);
    throw new Error(msg);
  }

  // Log one "tips" entry: a descriptive comment plus an example command.
  function printExample(comment, command) {
    toLog(comment, 'console-message');
    toLog(command, 'console-example');
  }

  function printExamples() {
    printExample("See a list of all console commands", "$ help");
    printExample("Get help using a single command", "$ help innerlines");
    printExample("Get information about the active data layer", "$ info");
    printExample("Delete one state from a national dataset","$ filter 'STATE != \"Alaska\"'");
    printExample("Aggregate counties to states by dissolving shared edges" ,"$ dissolve 'STATE'");
    printExample("Clear the console", "$ clear");
  }
}

// Application data model: the set of imported datasets/layers, the currently
// edited layer, and the active UI mode. Emits 'select', 'update' and 'mode'
// events (inherits EventDispatcher below).
function Model() {
  var datasets = [],
      self = this,
      mode = null,
      editing; // {layer, dataset} for the layer currently being edited

  // Call @cb(lyr, dataset, i) for every layer of every dataset.
  this.forEachLayer = function(cb) {
    var i = 0;
    datasets.forEach(function(dataset) {
      dataset.layers.forEach(function(lyr) {
        cb(lyr, dataset, i++);
      });
    });
  };

  // Remove @lyr from @dataset; drops the dataset itself once empty.
  this.deleteLayer = function(lyr, dataset) {
    var layers = dataset.layers;
    layers.splice(layers.indexOf(lyr), 1);
    if (layers.length === 0) {
      this.removeDataset(dataset);
    }
  };

  // Find the {layer, dataset} record for @target, or null.
  this.findLayer = function(target) {
    var found = null;
    this.forEachLayer(function(lyr, dataset) {
      if (lyr == target) {
        found = layerObject(lyr, dataset);
      }
    });
    return found;
  };

  // Return any layer other than @target, or null if none exists.
  this.findAnotherLayer = function(target) {
    var layers = this.getLayers(),
        found = null;
    if (layers.length > 1) {
      found = layers[0].layer == target ? layers[1] : layers[0];
    }
    return found;
  };

  this.removeDataset = function(target) {
    if (target == (editing && editing.dataset)) {
      error("Can't remove dataset while editing");
    }
    datasets = datasets.filter(function(d) {
      return d != target;
    });
  };

  this.getDatasets = function() {
    return datasets;
  };

  // All layers across all datasets as {layer, dataset} records.
  this.getLayers = function() {
    var layers = [];
    this.forEachLayer(function(lyr, dataset) {
      layers.push(layerObject(lyr, dataset));
    });
    return layers;
  };

  // Cycle editing focus to the next layer (wraps around).
  this.selectNextLayer = function() {
    var layers = this.getLayers(),
        idx = indexOfLayer(editing.layer, layers),
        next;
    if (layers.length > 1 && idx > -1) {
      next = layers[(idx + 1) % layers.length];
      this.selectLayer(next.layer, next.dataset);
    }
  };

  // Cycle editing focus to the previous layer (wraps around).
  this.selectPrevLayer = function() {
    var layers = this.getLayers(),
        idx = indexOfLayer(editing.layer, layers),
        prev;
    if (layers.length > 1 && idx > -1) {
      prev = layers[idx === 0 ? layers.length - 1 : idx - 1];
      this.selectLayer(prev.layer, prev.dataset);
    }
  };

  this.selectLayer = function(lyr, dataset) {
    this.updated({select: true}, lyr, dataset);
  };

  this.addDataset = function(dataset) {
    this.updated({select: true, import: true}, dataset.layers[0], dataset);
  };

  // Central change notification: optionally switch the edited layer, then
  // dispatch 'select' (if the selection changed) and 'update' events.
  // @flags e.g. {select, import, simplify, proj, ...}
  this.updated = function(flags, lyr, dataset) {
    var e;
    flags = flags || {};
    if (lyr && dataset && (!editing || editing.layer != lyr)) {
      setEditingLayer(lyr, dataset);
      flags.select = true;
    }
    if (editing) {
      if (flags.select) {
        this.dispatchEvent('select', editing);
      }
      e = utils.extend({flags: flags}, editing);
      this.dispatchEvent('update', e);
    }
  };

  this.getEditingLayer = function() {
    return editing || {};
  };

  this.getMode = function() {
    return mode;
  };

  // return a function to trigger this mode
  this.addMode = function(name, enter, exit) {
    this.on('mode', function(e) {
      if (e.prev == name) {
        exit();
      }
      if (e.name == name) {
        enter();
      }
    });
  };

  this.addMode(null, function() {}, function() {}); // null mode

  this.clearMode = function() {
    self.enterMode(null);
  };

  this.enterMode =
function(next) { var prev = mode; if (next != prev) { mode = next; self.dispatchEvent('mode', {name: next, prev: prev}); } }; function setEditingLayer(lyr, dataset) { if (editing && editing.layer == lyr) { return; } if (dataset.layers.indexOf(lyr) == -1) { error("Selected layer not found"); } if (datasets.indexOf(dataset) == -1) { datasets.push(dataset); } editing = layerObject(lyr, dataset); } function layerObject(lyr, dataset) { return { layer: lyr, dataset: dataset }; } function indexOfLayer(lyr, layers) { var idx = -1; layers.forEach(function(o, i) { if (o.layer == lyr) idx = i; }); return idx; } } utils.inherit(Model, EventDispatcher); Browser.onload(function() { if (!gui.browserIsSupported()) { El("#mshp-not-supported").show(); return; } gui.startEditing(); if (window.location.hostname == 'localhost') { window.addEventListener('beforeunload', function() { // send termination signal for mapshaper-gui var req = new XMLHttpRequest(); req.open('GET', '/close'); req.send(); }); } }); gui.startEditing = function() { var model = new Model(), dataLoaded = false, map, repair, simplify; gui.startEditing = function() {}; gui.alert = new ErrorMessages(model); map = new MshpMap(model); repair = new RepairControl(model, map); simplify = new SimplifyControl(model); new ImportFileProxy(model); new ImportControl(model); new ExportControl(model); new LayerControl(model); model.on('select', function() { if (!dataLoaded) { dataLoaded = true; El('#mode-buttons').show(); El('#nav-buttons').show(); new Console(model); } }); // TODO: untangle dependencies between SimplifyControl, RepairControl and Map simplify.on('simplify-start', function() { repair.hide(); }); simplify.on('simplify-end', function() { repair.update(); }); simplify.on('change', function(e) { map.setSimplifyPct(e.value); }); }; }()); ================================================ FILE: mapshaper.js ================================================ (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof 
require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o max ? max : val); }, interpolate: function(val1, val2, pct) { return val1 * (1-pct) + val2 * pct; }, isArray: function(obj) { return Array.isArray(obj); }, // NaN -> true isNumber: function(obj) { // return toString.call(obj) == '[object Number]'; // ie8 breaks? return obj != null && obj.constructor == Number; }, isInteger: function(obj) { return Utils.isNumber(obj) && ((obj | 0) === obj); }, isString: function(obj) { return obj != null && obj.toString === String.prototype.toString; // TODO: replace w/ something better. }, isBoolean: function(obj) { return obj === true || obj === false; }, // Convert an array-like object to an Array, or make a copy if @obj is an Array toArray: function(obj) { var arr; if (!Utils.isArrayLike(obj)) error("Utils.toArray() requires an array-like object"); try { arr = Array.prototype.slice.call(obj, 0); // breaks in ie8 } catch(e) { // support ie8 arr = []; for (var i=0, n=obj.length; i 0) return true; return false; }, // See https://raw.github.com/kvz/phpjs/master/functions/strings/addslashes.js addslashes: function(str) { return (str + '').replace(/[\\"']/g, '\\$&').replace(/\u0000/g, '\\0'); }, // Escape a literal string to use in a regexp. 
// Ref.: http://simonwillison.net/2006/Jan/20/escape/ regexEscape: function(str) { return str.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); }, defaults: function(dest) { for (var i=1, n=arguments.length; i 0) { dest.push.apply(dest, src); } return dest; }; // Returns elements in arr and not in other // (similar to underscore diff) utils.difference = function(arr, other) { var index = utils.arrayToIndex(other); return arr.filter(function(el) { return !Object.prototype.hasOwnProperty.call(index, el); }); }; // Test a string or array-like object for existence of substring or element utils.contains = function(container, item) { if (utils.isString(container)) { return container.indexOf(item) != -1; } else if (utils.isArrayLike(container)) { return utils.indexOf(container, item) != -1; } error("Expected Array or String argument"); }; utils.some = function(arr, test) { return arr.reduce(function(val, item) { return val || test(item); // TODO: short-circuit? }, false); }; utils.every = function(arr, test) { return arr.reduce(function(val, item) { return val && test(item); }, true); }; utils.find = function(arr, test, ctx) { var matches = arr.filter(test, ctx); return matches.length === 0 ? null : matches[0]; }; utils.indexOf = function(arr, item, prop) { if (prop) error("utils.indexOf() No longer supports property argument"); var nan = !(item === item); for (var i = 0, len = arr.length || 0; i < len; i++) { if (arr[i] === item) return i; if (nan && !(arr[i] === arr[i])) return i; } return -1; }; utils.range = function(len, start, inc) { var arr = [], v = start === void 0 ? 0 : start, i = inc === void 0 ? 1 : inc; while(len--) { arr.push(v); v += i; } return arr; }; utils.repeat = function(times, func) { var values = [], val; for (var i=0; i 0 ? 
values : void 0; }; // Calc sum, skip falsy and NaN values // Assumes: no other non-numeric objects in array // utils.sum = function(arr, info) { if (!utils.isArrayLike(arr)) error ("utils.sum() expects an array, received:", arr); var tot = 0, nan = 0, val; for (var i=0, n=arr.length; i max) max = val; } return { min: min, max: max, nan: nan }; }; utils.uniq = function(src) { var index = {}; return src.reduce(function(memo, el) { if (el in index === false) { index[el] = true; memo.push(el); } return memo; }, []); }; utils.pluck = function(arr, key) { return arr.map(function(obj) { return obj[key]; }); }; utils.countValues = function(arr) { return arr.reduce(function(memo, val) { memo[val] = (val in memo) ? memo[val] + 1 : 1; return memo; }, {}); }; utils.indexOn = function(arr, k) { return arr.reduce(function(index, o) { index[o[k]] = o; return index; }, {}); }; utils.groupBy = function(arr, k) { return arr.reduce(function(index, o) { var keyval = o[k]; if (keyval in index) { index[keyval].push(o); } else { index[keyval] = [o] } return index; }, {}); }; utils.arrayToIndex = function(arr, val) { var init = arguments.length > 1; return arr.reduce(function(index, key) { index[key] = init ? val : true; return index; }, {}); }; // Support for iterating over array-like objects, like typed arrays utils.forEach = function(arr, func, ctx) { if (!utils.isArrayLike(arr)) { throw new Error("#forEach() takes an array-like argument. " + arr); } for (var i=0, n=arr.length; i < n; i++) { func.call(ctx, arr[i], i); } }; utils.forEachProperty = function(o, func, ctx) { Object.keys(o).forEach(function(key) { func.call(ctx, o[key], key); }); }; utils.initializeArray = function(arr, init) { for (var i=0, len=arr.length; i start) { fmt = ',' + str.substring(ins, end) + fmt; end = ins; ins -= 3; } return str.substring(0, end) + fmt; }; Utils.numToStr = function(num, decimals) { return decimals >= 0 ? 
num.toFixed(decimals) : String(num); }; Utils.formatNumber = function(num, decimals, nullStr, showPos) { var fmt; if (isNaN(num)) { fmt = nullStr || '-'; } else { fmt = Utils.numToStr(num, decimals); fmt = Utils.addThousandsSep(fmt); if (showPos && parseFloat(fmt) > 0) { fmt = "+" + fmt; } } return fmt; }; function Transform() { this.mx = this.my = 1; this.bx = this.by = 0; } Transform.prototype.isNull = function() { return !this.mx || !this.my || isNaN(this.bx) || isNaN(this.by); }; Transform.prototype.invert = function() { var inv = new Transform(); inv.mx = 1 / this.mx; inv.my = 1 / this.my; //inv.bx = -this.bx * inv.mx; //inv.by = -this.by * inv.my; inv.bx = -this.bx / this.mx; inv.by = -this.by / this.my; return inv; }; Transform.prototype.transform = function(x, y, xy) { xy = xy || []; xy[0] = x * this.mx + this.bx; xy[1] = y * this.my + this.by; return xy; }; Transform.prototype.toString = function() { return Utils.toString(Utils.extend({}, this)); }; function Bounds() { if (arguments.length > 0) { this.setBounds.apply(this, arguments); } } Bounds.prototype.toString = function() { return JSON.stringify({ xmin: this.xmin, xmax: this.xmax, ymin: this.ymin, ymax: this.ymax }); }; Bounds.prototype.toArray = function() { return this.hasBounds() ? 
[this.xmin, this.ymin, this.xmax, this.ymax] : []; }; Bounds.prototype.hasBounds = function() { return this.xmin <= this.xmax && this.ymin <= this.ymax; }; Bounds.prototype.sameBounds = Bounds.prototype.equals = function(bb) { return bb && this.xmin === bb.xmin && this.xmax === bb.xmax && this.ymin === bb.ymin && this.ymax === bb.ymax; }; Bounds.prototype.width = function() { return (this.xmax - this.xmin) || 0; }; Bounds.prototype.height = function() { return (this.ymax - this.ymin) || 0; }; Bounds.prototype.area = function() { return this.width() * this.height() || 0; }; Bounds.prototype.empty = function() { this.xmin = this.ymin = this.xmax = this.ymax = void 0; return this; }; Bounds.prototype.setBounds = function(a, b, c, d) { if (arguments.length == 1) { // assume first arg is a Bounds or array if (Utils.isArrayLike(a)) { b = a[1]; c = a[2]; d = a[3]; a = a[0]; } else { b = a.ymin; c = a.xmax; d = a.ymax; a = a.xmin; } } this.xmin = a; this.ymin = b; this.xmax = c; this.ymax = d; if (a > c || b > d) this.update(); // error("Bounds#setBounds() min/max reversed:", a, b, c, d); return this; }; Bounds.prototype.centerX = function() { var x = (this.xmin + this.xmax) * 0.5; return x; }; Bounds.prototype.centerY = function() { var y = (this.ymax + this.ymin) * 0.5; return y; }; Bounds.prototype.containsPoint = function(x, y) { if (x >= this.xmin && x <= this.xmax && y <= this.ymax && y >= this.ymin) { return true; } return false; }; // intended to speed up slightly bubble symbol detection; could use intersects() instead // TODO: fix false positive where circle is just outside a corner of the box Bounds.prototype.containsBufferedPoint = Bounds.prototype.containsCircle = function(x, y, buf) { if ( x + buf > this.xmin && x - buf < this.xmax ) { if ( y - buf < this.ymax && y + buf > this.ymin ) { return true; } } return false; }; Bounds.prototype.intersects = function(bb) { if (bb.xmin <= this.xmax && bb.xmax >= this.xmin && bb.ymax >= this.ymin && bb.ymin <= this.ymax) 
{ return true; } return false; }; Bounds.prototype.contains = function(bb) { if (bb.xmin >= this.xmin && bb.ymax <= this.ymax && bb.xmax <= this.xmax && bb.ymin >= this.ymin) { return true; } return false; }; Bounds.prototype.shift = function(x, y) { this.setBounds(this.xmin + x, this.ymin + y, this.xmax + x, this.ymax + y); }; Bounds.prototype.padBounds = function(a, b, c, d) { this.xmin -= a; this.ymin -= b; this.xmax += c; this.ymax += d; }; // Rescale the bounding box by a fraction. TODO: implement focus. // @param {number} pct Fraction of original extents // @param {number} pctY Optional amount to scale Y // Bounds.prototype.scale = function(pct, pctY) { /*, focusX, focusY*/ var halfWidth = (this.xmax - this.xmin) * 0.5; var halfHeight = (this.ymax - this.ymin) * 0.5; var kx = pct - 1; var ky = pctY === undefined ? kx : pctY - 1; this.xmin -= halfWidth * kx; this.ymin -= halfHeight * ky; this.xmax += halfWidth * kx; this.ymax += halfHeight * ky; }; // Return a bounding box with the same extent as this one. 
Bounds.prototype.cloneBounds = // alias so child classes can override clone() Bounds.prototype.clone = function() { return new Bounds(this.xmin, this.ymin, this.xmax, this.ymax); }; Bounds.prototype.clearBounds = function() { this.setBounds(new Bounds()); }; Bounds.prototype.mergePoint = function(x, y) { if (this.xmin === void 0) { this.setBounds(x, y, x, y); } else { // this works even if x,y are NaN if (x < this.xmin) this.xmin = x; else if (x > this.xmax) this.xmax = x; if (y < this.ymin) this.ymin = y; else if (y > this.ymax) this.ymax = y; } }; // expands either x or y dimension to match @aspect (width/height ratio) // @focusX, @focusY (optional): expansion focus, as a fraction of width and height Bounds.prototype.fillOut = function(aspect, focusX, focusY) { if (arguments.length < 3) { focusX = 0.5; focusY = 0.5; } var w = this.width(), h = this.height(), currAspect = w / h, pad; if (isNaN(aspect) || aspect <= 0) { // error condition; don't pad } else if (currAspect < aspect) { // fill out x dimension pad = h * aspect - w; this.xmin -= (1 - focusX) * pad; this.xmax += focusX * pad; } else { pad = w / aspect - h; this.ymin -= (1 - focusY) * pad; this.ymax += focusY * pad; } return this; }; Bounds.prototype.update = function() { var tmp; if (this.xmin > this.xmax) { tmp = this.xmin; this.xmin = this.xmax; this.xmax = tmp; } if (this.ymin > this.ymax) { tmp = this.ymin; this.ymin = this.ymax; this.ymax = tmp; } }; Bounds.prototype.transform = function(t) { this.xmin = this.xmin * t.mx + t.bx; this.xmax = this.xmax * t.mx + t.bx; this.ymin = this.ymin * t.my + t.by; this.ymax = this.ymax * t.my + t.by; this.update(); return this; }; // Returns a Transform object for mapping this onto Bounds @b2 // @flipY (optional) Flip y-axis coords, for converting to/from pixel coords // Bounds.prototype.getTransform = function(b2, flipY) { var t = new Transform(); t.mx = b2.width() / this.width(); t.bx = b2.xmin - t.mx * this.xmin; if (flipY) { t.my = -b2.height() / 
this.height(); t.by = b2.ymax - t.my * this.ymin; } else { t.my = b2.height() / this.height(); t.by = b2.ymin - t.my * this.ymin; } return t; }; Bounds.prototype.mergeCircle = function(x, y, r) { if (r < 0) r = -r; this.mergeBounds([x - r, y - r, x + r, y + r]); }; Bounds.prototype.mergeBounds = function(bb) { var a, b, c, d; if (bb instanceof Bounds) { a = bb.xmin, b = bb.ymin, c = bb.xmax, d = bb.ymax; } else if (arguments.length == 4) { a = arguments[0]; b = arguments[1]; c = arguments[2]; d = arguments[3]; } else if (bb.length == 4) { // assume array: [xmin, ymin, xmax, ymax] a = bb[0], b = bb[1], c = bb[2], d = bb[3]; } else { error("Bounds#mergeBounds() invalid argument:", bb); } if (this.xmin === void 0) { this.setBounds(a, b, c, d); } else { if (a < this.xmin) this.xmin = a; if (b < this.ymin) this.ymin = b; if (c > this.xmax) this.xmax = c; if (d > this.ymax) this.ymax = d; } return this; }; // Sort an array of objects based on one or more properties. // Usage: Utils.sortOn(array, key1, asc?[, key2, asc? ...]) // Utils.sortOn = function(arr) { var comparators = []; for (var i=1; i operators (strings, numbers) // null, undefined and NaN are sorted to the end of the array // Utils.genericSort = function(arr, asc) { var compare = Utils.getGenericComparator(asc); Array.prototype.sort.call(arr, compare); return arr; }; Utils.sortOnKey = function(arr, getter, asc) { var compare = Utils.getGenericComparator(asc !== false) // asc is default arr.sort(function(a, b) { return compare(getter(a), getter(b)); }); }; // Stashes keys in a temp array (better if calculating key is expensive). 
Utils.sortOnKey2 = function(arr, getKey, asc) { Utils.sortArrayByKeys(arr, arr.map(getKey), asc); }; Utils.sortArrayByKeys = function(arr, keys, asc) { var ids = Utils.getSortedIds(keys, asc); Utils.reorderArray(arr, ids); }; Utils.getSortedIds = function(arr, asc) { var ids = Utils.range(arr.length); Utils.sortArrayIndex(ids, arr, asc); return ids; }; Utils.sortArrayIndex = function(ids, arr, asc) { var compare = Utils.getGenericComparator(asc); ids.sort(function(i, j) { // added i, j comparison to guarantee that sort is stable var cmp = compare(arr[i], arr[j]); return cmp > 0 || cmp === 0 && i < j ? 1 : -1; }); }; Utils.reorderArray = function(arr, idxs) { var len = idxs.length; var arr2 = []; for (var i=0; i= len) error("Out-of-bounds array idx"); arr2[i] = arr[idx]; } Utils.replaceArray(arr, arr2); }; Utils.getKeyComparator = function(key, asc) { var compare = Utils.getGenericComparator(asc); return function(a, b) { return compare(a[key], b[key]); }; }; Utils.getGenericComparator = function(asc) { asc = asc !== false; return function(a, b) { var retn = 0; if (b == null) { retn = a == null ? 0 : -1; } else if (a == null) { retn = 1; } else if (a < b) { retn = asc ? -1 : 1; } else if (a > b) { retn = asc ? 
1 : -1; } else if (a !== a) { retn = 1; } else if (b !== b) { retn = -1; } return retn; }; }; // Generic in-place sort (null, NaN, undefined not handled) Utils.quicksort = function(arr, asc) { Utils.quicksortPartition(arr, 0, arr.length-1); if (asc === false) Array.prototype.reverse.call(arr); // Works with typed arrays return arr; }; // Moved out of Utils.quicksort() (saw >100% speedup in Chrome with deep recursion) Utils.quicksortPartition = function (a, lo, hi) { var i = lo, j = hi, pivot, tmp; while (i < hi) { pivot = a[lo + hi >> 1]; // avoid n^2 performance on sorted arrays while (i <= j) { while (a[i] < pivot) i++; while (a[j] > pivot) j--; if (i <= j) { tmp = a[i]; a[i] = a[j]; a[j] = tmp; i++; j--; } } if (lo < j) Utils.quicksortPartition(a, lo, j); lo = i; j = hi; } }; Utils.findRankByValue = function(arr, value) { if (isNaN(value)) return arr.length; var rank = 1; for (var i=0, n=arr.length; i arr[i]) rank++; } return rank; } Utils.findValueByPct = function(arr, pct) { var rank = Math.ceil((1-pct) * (arr.length)); return Utils.findValueByRank(arr, rank); }; // See http://ndevilla.free.fr/median/median/src/wirth.c // Elements of @arr are reordered // Utils.findValueByRank = function(arr, rank) { if (!arr.length || rank < 1 || rank > arr.length) error("[findValueByRank()] invalid input"); rank = Utils.clamp(rank | 0, 1, arr.length); var k = rank - 1, // conv. 
rank to array index n = arr.length, l = 0, m = n - 1, i, j, val, tmp; while (l < m) { val = arr[k]; i = l; j = m; do { while (arr[i] < val) {i++;} while (val < arr[j]) {j--;} if (i <= j) { tmp = arr[i]; arr[i] = arr[j]; arr[j] = tmp; i++; j--; } } while (i <= j); if (j < k) l = i; if (k < i) m = j; } return arr[k]; }; // // Utils.findMedian = function(arr) { var n = arr.length, rank = Math.floor(n / 2) + 1, median = Utils.findValueByRank(arr, rank); if ((n & 1) == 0) { median = (median + Utils.findValueByRank(arr, rank - 1)) / 2; } return median; }; // Wrapper for DataView class for more convenient reading and writing of // binary data; Remembers endianness and read/write position. // Has convenience methods for copying from buffers, etc. // function BinArray(buf, le) { if (Utils.isNumber(buf)) { buf = new ArrayBuffer(buf); } else if (Env.inNode && buf instanceof Buffer == true) { // Since node 0.10, DataView constructor doesn't accept Buffers, // so need to copy Buffer to ArrayBuffer buf = BinArray.toArrayBuffer(buf); } if (buf instanceof ArrayBuffer == false) { error("BinArray constructor takes an integer, ArrayBuffer or Buffer argument"); } this._buffer = buf; this._bytes = new Uint8Array(buf); this._view = new DataView(buf); this._idx = 0; this._le = le !== false; } BinArray.bufferToUintArray = function(buf, wordLen) { if (wordLen == 4) return new Uint32Array(buf); if (wordLen == 2) return new Uint16Array(buf); if (wordLen == 1) return new Uint8Array(buf); error("BinArray.bufferToUintArray() invalid word length:", wordLen) }; BinArray.uintSize = function(i) { return i & 1 || i & 2 || 4; }; BinArray.bufferCopy = function(dest, destId, src, srcId, bytes) { srcId = srcId || 0; bytes = bytes || src.byteLength - srcId; if (dest.byteLength - destId < bytes) error("Buffer overflow; tried to write:", bytes); // When possible, copy buffer data in multi-byte chunks... Added this for faster copying of // shapefile data, which is aligned to 32 bits. 
var wordSize = Math.min(BinArray.uintSize(bytes), BinArray.uintSize(srcId), BinArray.uintSize(dest.byteLength), BinArray.uintSize(destId), BinArray.uintSize(src.byteLength)); var srcArr = BinArray.bufferToUintArray(src, wordSize), destArr = BinArray.bufferToUintArray(dest, wordSize), count = bytes / wordSize, i = srcId / wordSize, j = destId / wordSize; while (count--) { destArr[j++] = srcArr[i++]; } return bytes; }; BinArray.toArrayBuffer = function(src) { var n = src.length, dest = new ArrayBuffer(n), view = new Uint8Array(dest); for (var i=0; i= 0 ? i : this._idx); }, position: function(i) { if (i != null) { this._idx = i; return this; } return this._idx; }, readCString: function(fixedLen, asciiOnly) { var str = "", count = fixedLen >= 0 ? fixedLen : this.bytesLeft(); while (count > 0) { var byteVal = this.readUint8(); count--; if (byteVal == 0) { break; } else if (byteVal > 127 && asciiOnly) { str = null; break; } str += String.fromCharCode(byteVal); } if (fixedLen > 0 && count > 0) { this.skipBytes(count); } return str; }, writeString: function(str, maxLen) { var bytesWritten = 0, charsToWrite = str.length, cval; if (maxLen) { charsToWrite = Math.min(charsToWrite, maxLen); } for (var i=0; i 127) { trace("#writeCString() Unicode value beyond ascii range") cval = '?'.charCodeAt(0); } this.writeUint8(cval); bytesWritten++; } return bytesWritten; }, writeCString: function(str, fixedLen) { var maxChars = fixedLen ? 
fixedLen - 1 : null, bytesWritten = this.writeString(str, maxChars); this.writeUint8(0); // terminator bytesWritten++; if (fixedLen) { while (bytesWritten < fixedLen) { this.writeUint8(0); bytesWritten++; } } return this; }, writeBuffer: function(buf, bytes, startIdx) { this._idx += BinArray.bufferCopy(this._buffer, this._idx, buf, startIdx, bytes); return this; } }; /* A simplified version of printf formatting Format codes: %[flags][width][.precision]type supported flags: + add '+' before positive numbers 0 left-pad with '0' ' Add thousands separator width: 1 to many precision: .(1 to many) type: s string di integers f decimal numbers xX hexidecimal (unsigned) % literal '%' Examples: code val formatted %+d 1 '+1' %4i 32 ' 32' %04i 32 '0032' %x 255 'ff' %.2f 0.125 '0.13' %'f 1000 '1,000' */ // Usage: Utils.format(formatString, [values]) // Tip: When reusing the same format many times, use Utils.formatter() for 5x - 10x better performance // Utils.format = function(fmt) { var fn = Utils.formatter(fmt); var str = fn.apply(null, Array.prototype.slice.call(arguments, 1)); return str; }; function formatValue(val, matches) { var flags = matches[1]; var padding = matches[2]; var decimals = matches[3] ? parseInt(matches[3].substr(1)) : void 0; var type = matches[4]; var isString = type == 's', isHex = type == 'x' || type == 'X', isInt = type == 'd' || type == 'i', isFloat = type == 'f', isNumber = !isString; var sign = "", padDigits = 0, isZero = false, isNeg = false; var str; if (isString) { str = String(val); } else if (isHex) { str = val.toString(16); if (type == 'X') str = str.toUpperCase(); } else if (isNumber) { str = Utils.numToStr(val, isInt ? 
0 : decimals); if (str[0] == '-') { isNeg = true; str = str.substr(1); } isZero = parseFloat(str) == 0; if (flags.indexOf("'") != -1 || flags.indexOf(',') != -1) { str = Utils.addThousandsSep(str); } if (!isZero) { // BUG: sign is added when num rounds to 0 if (isNeg) { sign = "\u2212"; // U+2212 } else if (flags.indexOf('+') != -1) { sign = '+'; } } } if (padding) { var strLen = str.length + sign.length; var minWidth = parseInt(padding, 10); if (strLen < minWidth) { padDigits = minWidth - strLen; var padChar = flags.indexOf('0') == -1 ? ' ' : '0'; var padStr = Utils.repeatString(padChar, padDigits); } } if (padDigits == 0) { str = sign + str; } else if (padChar == '0') { str = sign + padStr + str; } else { str = padStr + sign + str; } return str; } // Get a function for interpolating formatted values into a string. Utils.formatter = function(fmt) { var codeRxp = /%([\',+0]*)([1-9]?)((?:\.[1-9])?)([sdifxX%])/g; var literals = [], formatCodes = [], startIdx = 0, prefix = "", literal, matches; while (matches=codeRxp.exec(fmt)) { literal = fmt.substring(startIdx, codeRxp.lastIndex - matches[0].length); if (matches[0] == '%%') { prefix += literal + '%'; } else { literals.push(prefix + literal); prefix = ''; formatCodes.push(matches); } startIdx = codeRxp.lastIndex; } literals.push(prefix + fmt.substr(startIdx)); return function() { var str = literals[0], n = arguments.length; if (n != formatCodes.length) { error("[format()] Data does not match format string; format:", fmt, "data:", arguments); } for (var i=0; i= 1 ? rate : 1.2, buf; return function(size) { if (size > capacity) { capacity = Math.ceil(size * k); buf = new constructor(capacity); } return buf; }; }; utils.copyElements = function(src, i, dest, j, n, rev) { if (src === dest && j > i) error ("copy error"); var inc = 1, offs = 0; if (rev) { inc = -1; offs = n - 1; } for (var k=0; k= 0 ? 
arcId : ~arcId; } api.enableLogging = function() { MapShaper.LOGGING = true; return api; }; api.printError = function(err) { var msg; if (utils.isString(err)) { err = new APIError(err); } if (MapShaper.LOGGING && err.name == 'APIError') { msg = err.message; if (!/Error/.test(msg)) { msg = "Error: " + msg; } message(msg); message("Run mapshaper -h to view help"); } else { throw err; } }; MapShaper.error = function() { var msg = Utils.toArray(arguments).join(' '); throw new Error(msg); }; MapShaper.stop = function() { throw new APIError(MapShaper.formatLogArgs(arguments)); }; MapShaper.message = function() { if (MapShaper.LOGGING) { MapShaper.logArgs(arguments); } }; MapShaper.formatLogArgs = function(args) { return utils.toArray(args).join(' '); }; // Format an array of (preferably short) strings in columns for console logging. MapShaper.formatStringsAsGrid = function(arr) { // TODO: variable column width var longest = arr.reduce(function(len, str) { return Math.max(len, str.length); }, 0), colWidth = longest + 2, perLine = Math.floor(80 / colWidth) || 1; return arr.reduce(function(memo, name, i) { var col = i % perLine; if (i > 0 && col === 0) memo += '\n'; if (col < perLine - 1) { // right-pad all but rightmost column name = utils.rpad(name, colWidth - 2, ' '); } return memo + ' ' + name; }, ''); }; MapShaper.logArgs = function(args) { if (utils.isArrayLike(args)) { (console.error || console.log).call(console, MapShaper.formatLogArgs(args)); } }; MapShaper.getWorldBounds = function(e) { e = utils.isFiniteNumber(e) ? e : 1e-10; return [-180 + e, -90 + e, 180 - e, 90 - e]; }; MapShaper.probablyDecimalDegreeBounds = function(b) { var world = MapShaper.getWorldBounds(-1), // add a bit of excess bbox = (b instanceof Bounds) ? 
b.toArray() : b; return containsBounds(world, bbox); };

// True if the layer has at least one non-null path or point shape.
MapShaper.layerHasGeometry = function(lyr) {
  return MapShaper.layerHasPaths(lyr) || MapShaper.layerHasPoints(lyr);
};

// True if the layer is a polygon/polyline layer with at least one non-null shape.
MapShaper.layerHasPaths = function(lyr) {
  return (lyr.geometry_type == 'polygon' || lyr.geometry_type == 'polyline') &&
      MapShaper.layerHasNonNullShapes(lyr);
};

// True if the layer is a point layer with at least one non-null shape.
MapShaper.layerHasPoints = function(lyr) {
  return lyr.geometry_type == 'point' && MapShaper.layerHasNonNullShapes(lyr);
};

// True if any element of lyr.shapes is truthy (lyr.shapes may be missing).
MapShaper.layerHasNonNullShapes = function(lyr) {
  return utils.some(lyr.shapes || [], function(shp) {
    return !!shp;
  });
};

// Verify that @table exists and contains every field in @fields;
// calls stop() (aborts the command) with a descriptive message otherwise.
// @cmd (optional) command name used to prefix the error message.
MapShaper.requireDataFields = function(table, fields, cmd) {
  var prefix = cmd ? '[' + cmd + '] ' : '';
  if (!table) {
    stop(prefix + "Missing attribute data");
  }
  var dataFields = table.getFields(),
      missingFields = utils.difference(fields, dataFields);
  if (missingFields.length > 0) {
    stop(prefix + "Table is missing one or more fields:\n",
        missingFields, "\nExisting fields:", '\n' + MapShaper.formatStringsAsGrid(dataFields));
  }
};

// Abort with @msg (or a default message) unless @lyr is a polygon layer.
MapShaper.requirePolygonLayer = function(lyr, msg) {
  if (!lyr || lyr.geometry_type !== 'polygon') stop(msg || "Expected a polygon layer");
};

// Abort with @msg (or a default message) unless @lyr has path geometry.
MapShaper.requirePathLayer = function(lyr, msg) {
  if (!lyr || !MapShaper.layerHasPaths(lyr)) stop(msg || "Expected a polygon or polyline layer");
};

// Spherical Earth radius in meters (WGS84 semi-major axis).
var R = 6378137;
// Degrees-to-radians conversion factor.
var D2R = Math.PI / 180;

// Equirectangular projection
function degreesToMeters(deg) {
  return deg * D2R * R;
}

// Euclidean distance between two 3D points.
function distance3D(ax, ay, az, bx, by, bz) {
  var dx = ax - bx,
      dy = ay - by,
      dz = az - bz;
  return Math.sqrt(dx * dx + dy * dy + dz * dz);
}

// Squared distance between two 2D points (avoids the sqrt for comparisons).
function distanceSq(ax, ay, bx, by) {
  var dx = ax - bx,
      dy = ay - by;
  return dx * dx + dy * dy;
}

// Euclidean distance between two 2D points.
function distance2D(ax, ay, bx, by) {
  var dx = ax - bx,
      dy = ay - by;
  return Math.sqrt(dx * dx + dy * dy);
}

// Squared distance between two 3D points.
function distanceSq3D(ax, ay, az, bx, by, bz) {
  var dx = ax - bx,
      dy = ay - by,
      dz = az - bz;
  return dx * dx + dy * dy + dz * dz;
}

// Returns a function that rounds a number to the nearest multiple of @inc.
// Throws (via error()) for a missing, non-numeric or zero increment.
function getRoundingFunction(inc) { if (!utils.isNumber(inc) || inc === 0) {
error("Rounding increment must be a non-zero number."); }
  // Work with the reciprocal of the increment; rounding inv itself keeps the
  // divisor an integer when possible, which limits fp noise in the result.
  var inv = 1 / inc;
  if (inv > 1) inv = Math.round(inv);
  return function(x) {
    return Math.round(x * inv) / inv;
    // these alternatives show rounding error after JSON.stringify()
    // return Math.round(x / inc) / inv;
    // return Math.round(x / inc) * inc;
    // return Math.round(x * inv) * inc;
  };
}

// Return id of nearest point to x, y, among x0, y0, x1, y1, ...
// Extra coordinate pairs beyond the named params are read from arguments[],
// so any number of candidate points may be passed (i counts pairs, j indexes
// arguments). Returns -1 when no candidate pairs are given.
function nearestPoint(x, y, x0, y0) {
  var minIdx = -1,
      minDist = Infinity,
      dist;
  for (var i = 0, j = 2, n = arguments.length; j < n; i++, j += 2) {
    dist = distanceSq(x, y, arguments[j], arguments[j+1]);
    if (dist < minDist) {
      minDist = dist;
      minIdx = i;
    }
  }
  return minIdx;
}

// Intersection of the two infinite lines through segments s1 and s2;
// returns [x, y], or null if the lines are parallel (zero determinant).
function lineIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  var den = determinant2D(s1p2x - s1p1x, s1p2y - s1p1y, s2p2x - s2p1x, s2p2y - s2p1y);
  if (den === 0) return null;
  var m = orient2D(s2p1x, s2p1y, s2p2x, s2p2y, s1p1x, s1p1y) / den;
  var x = s1p1x + m * (s1p2x - s1p1x);
  var y = s1p1y + m * (s1p2y - s1p1y);
  return [x, y];
}

// Get intersection point if segments are non-collinear, else return null
// Assumes that segments intersect
function crossIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  var p = lineIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y);
  var nearest;
  if (p) {
    // Re-order operands so intersection point is closest to s1p1 (better precision)
    // Source: Jonathan Shewchuk http://www.cs.berkeley.edu/~jrs/meshpapers/robnotes.pdf
    nearest = nearestPoint(p[0], p[1], s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y);
    if (nearest == 1) {
      // use b a c d
      p = lineIntersection(s1p2x, s1p2y, s1p1x, s1p1y, s2p1x, s2p1y, s2p2x, s2p2y);
    } else if (nearest == 2) {
      // use c d a b
      p = lineIntersection(s2p1x, s2p1y, s2p2x, s2p2y, s1p1x, s1p1y, s1p2x, s1p2y);
    } else if (nearest == 3) {
      // use d c a b
      p = lineIntersection(s2p2x, s2p2y, s2p1x, s2p1y, s1p1x, s1p1y, s1p2x, s1p2y);
    }
  }
  return p;
}

// Source: Sedgewick,
// _Algorithms in C_
// (Tried various other functions that failed owing to floating point errors)
// True if segments s1 (p1-p2) and s2 (p1-p2) touch or cross: each segment
// must lie on both sides of (or touch) the line through the other.
function segmentHit(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  var sideA = orient2D(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y);
  var sideB = orient2D(s1p1x, s1p1y, s1p2x, s1p2y, s2p2x, s2p2y);
  var sideC = orient2D(s2p1x, s2p1y, s2p2x, s2p2y, s1p1x, s1p1y);
  var sideD = orient2D(s2p1x, s2p1y, s2p2x, s2p2y, s1p2x, s1p2y);
  return sideA * sideB <= 0 && sideC * sideD <= 0;
}

// True if @x lies strictly between @minX and @maxX (endpoints excluded).
function inside(x, minX, maxX) {
  if (x <= minX) return false;
  return x < maxX;
}

// Return the segment's endpoints ordered by x coordinate (ties broken by y),
// as a flat [x1, y1, x2, y2] array.
function sortSeg(x1, y1, x2, y2) {
  var firstIsLower = x1 < x2 || (x1 == x2 && y1 < y2);
  if (firstIsLower) {
    return [x1, y1, x2, y2];
  }
  return [x2, y2, x1, y1];
}

// Assume segments s1 and s2 are collinear and overlap; collect the one or two
// endpoints that fall strictly inside the combined extent (i.e. inside the
// overlapping portion). Measures along whichever axis has the larger spread.
// Returns [x, y] or [x1, y1, x2, y2], or null when the configuration is
// degenerate (congruent segments, or segments that merely meet in the middle).
// TODO: refactor
function collinearIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  var minX = Math.min(s1p1x, s1p2x, s2p1x, s2p2x);
  var maxX = Math.max(s1p1x, s1p2x, s2p1x, s2p2x);
  var minY = Math.min(s1p1y, s1p2y, s2p1y, s2p2y);
  var maxY = Math.max(s1p1y, s1p2y, s2p1y, s2p2y);
  var useY = maxY - minY > maxX - minX;
  var coords = [];
  function isInternal(x, y) {
    return useY ? inside(y, minY, maxY) : inside(x, minX, maxX);
  }
  if (isInternal(s1p1x, s1p1y)) coords.push(s1p1x, s1p1y);
  if (isInternal(s1p2x, s1p2y)) coords.push(s1p2x, s1p2y);
  if (isInternal(s2p1x, s2p1y)) coords.push(s2p1x, s2p1y);
  if (isInternal(s2p2x, s2p2y)) coords.push(s2p2x, s2p2y);
  if (coords.length != 2 && coords.length != 4) {
    // e.g. congruent segments
    trace("Invalid collinear segment intersection", coords);
    coords = null;
  } else if (coords.length == 4 && coords[0] == coords[2] && coords[1] == coords[3]) {
    // segs that meet in the middle don't count
    coords = null;
  }
  return coords;
}

// True if the two segments share at least one endpoint exactly.
function endpointHit(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  var shared = (s1p1x == s2p1x && s1p1y == s2p1y) ||
      (s1p1x == s2p2x && s1p1y == s2p2y) ||
      (s1p2x == s2p1x && s1p2y == s2p1y) ||
      (s1p2x == s2p2x && s1p2y == s2p2y);
  return shared;
}

// Find intersections between two 2D segments.
// Return [x, y] point if segments intersect at a single point or are overlapping+collinear
// and one endpoint is inside overlapping portion
// Return [x1, y1, x2, y2] if segments are overlapping+collinear and have two endpoints inside overlapping portion
// Return null if segments do not touch
function segmentIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y) {
  if (!segmentHit(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y)) {
    return null;
  }
  var p = crossIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y);
  if (!p) {
    // colinear if p is null
    return collinearIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y);
  }
  if (endpointHit(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y)) {
    // filter out segments that only intersect at an endpoint
    return null;
  }
  return p;
}

// Determinant of matrix
// | a b |
// | c d |
function determinant2D(a, b, c, d) {
  var det = a * d - b * c;
  return det;
}

// Orientation test for point (x0, y0) relative to segment (x1,y1)-(x2,y2):
// positive/negative/zero by the sign of the 2x2 determinant.
// Source: Jonathan Shewchuk http://www.cs.berkeley.edu/~jrs/meshpapers/robnotes.pdf
function orient2D(x0, y0, x1, y1, x2, y2) {
  var ax = x0 - x2;
  var ay = y0 - y2;
  var bx = x1 - x2;
  var by = y1 - y2;
  return determinant2D(ax, ay, bx, by);
}

// Absolute angle abc in range [0, PI], using two atan2() calls.
// atan2() makes this function fairly slow, replaced by ~2x faster formula
function innerAngle2(ax, ay, bx, by, cx, cy) {
  var angleA = Math.atan2(ay - by, ax - bx);
  var angleC = Math.atan2(cy - by, cx - bx);
  var diff = Math.abs(angleA - angleC);
  if (diff > Math.PI) {
    diff = 2 * Math.PI - diff;
  }
  return diff;
}

// Return angle abc in range [0, 2PI) or NaN if angle is invalid
// (e.g.
if length of ab or bc is 0)
/*
function signedAngle2(ax, ay, bx, by, cx, cy) {
  var a1 = Math.atan2(ay - by, ax - bx),
      a2 = Math.atan2(cy - by, cx - bx),
      a3 = a2 - a1;
  if (ax == bx && ay == by || bx == cx && by == cy) {
    a3 = NaN; // Use NaN for invalid angles
  } else if (a3 >= Math.PI * 2) {
    a3 = 2 * Math.PI - a3;
  } else if (a3 < 0) {
    a3 = a3 + 2 * Math.PI;
  }
  return a3;
}
*/

// Normalize an angle in radians to the range [0, 2PI).
function standardAngle(a) {
  var twoPI = Math.PI * 2;
  while (a < 0) {
    a += twoPI;
  }
  while (a >= twoPI) {
    a -= twoPI;
  }
  return a;
}

// Signed angle abc in range [0, 2PI), computed from the dot and cross
// products of vectors b->a and b->c; NaN when either vector has zero length.
function signedAngle(ax, ay, bx, by, cx, cy) {
  if (ax == bx && ay == by || bx == cx && by == cy) {
    return NaN; // Use NaN for invalid angles
  }
  var abx = ax - bx,
      aby = ay - by,
      cbx = cx - bx,
      cby = cy - by,
      dotp = abx * cbx + aby * cby,
      crossp = abx * cby - aby * cbx,
      a = Math.atan2(crossp, dotp);
  return standardAngle(a);
}

// Calc bearing in radians at lng1, lat1
// Inputs are in degrees (converted to radians internally).
function bearing(lng1, lat1, lng2, lat2) {
  var D2R = Math.PI / 180; // shadows the module-level D2R; same value
  lng1 *= D2R;
  lng2 *= D2R;
  lat1 *= D2R;
  lat2 *= D2R;
  var y = Math.sin(lng2-lng1) * Math.cos(lat2),
      x = Math.cos(lat1)*Math.sin(lat2) - Math.sin(lat1)*Math.cos(lat2)*Math.cos(lng2-lng1);
  return Math.atan2(y, x);
}

// Calc angle of turn from ab to bc, in range [0, 2PI)
// Receive lat-lng values in degrees
// Returns NaN when a == b or b == c (no turn is defined).
function signedAngleSph(alng, alat, blng, blat, clng, clat) {
  if (alng == blng && alat == blat || blng == clng && blat == clat) {
    return NaN;
  }
  var b1 = bearing(blng, blat, alng, alat), // calc bearing at b
      b2 = bearing(blng, blat, clng, clat),
      a = Math.PI * 2 + b1 - b2;
  return standardAngle(a);
}

/*
// Convert arrays of lng and lat coords (xsrc, ysrc) into
// x, y, z coords (meters) on the most common spherical Earth model.
// function convLngLatToSph(xsrc, ysrc, xbuf, ybuf, zbuf) { var deg2rad = Math.PI / 180, r = R; for (var i=0, len=xsrc.length; i= 1 - 1e-14) { theta = 0; } else if (dotp <= -1 + 1e-14) { theta = Math.PI; } else { theta = Math.acos(dotp); // consider using other formula at small dp } } return theta; } function innerAngle3D(ax, ay, az, bx, by, bz, cx, cy, cz) { var ab = distance3D(ax, ay, az, bx, by, bz), bc = distance3D(bx, by, bz, cx, cy, cz), theta, dotp; if (ab === 0 || bc === 0) { theta = 0; } else { dotp = ((ax - bx) * (cx - bx) + (ay - by) * (cy - by) + (az - bz) * (cz - bz)) / (ab * bc); if (dotp >= 1) { theta = 0; } else if (dotp <= -1) { theta = Math.PI; } else { theta = Math.acos(dotp); // consider using other formula at small dp } } return theta; } function triangleArea(ax, ay, bx, by, cx, cy) { var area = Math.abs(((ay - cy) * (bx - cx) + (by - cy) * (cx - ax)) / 2); return area; } function detSq(ax, ay, bx, by, cx, cy) { var det = ax * by - ax * cy + bx * cy - bx * ay + cx * ay - cx * by; return det * det; } function cosine(ax, ay, bx, by, cx, cy) { var den = distance2D(ax, ay, bx, by) * distance2D(bx, by, cx, cy), cos = 0; if (den > 0) { cos = ((ax - bx) * (cx - bx) + (ay - by) * (cy - by)) / den; if (cos > 1) cos = 1; // handle fp rounding error else if (cos < -1) cos = -1; } return cos; } function cosine3D(ax, ay, az, bx, by, bz, cx, cy, cz) { var den = distance3D(ax, ay, az, bx, by, bz) * distance3D(bx, by, bz, cx, cy, cz), cos = 0; if (den > 0) { cos = ((ax - bx) * (cx - bx) + (ay - by) * (cy - by) + (az - bz) * (cz - bz)) / den; if (cos > 1) cos = 1; // handle fp rounding error else if (cos < -1) cos = -1; } return cos; } function triangleArea3D(ax, ay, az, bx, by, bz, cx, cy, cz) { var area = 0.5 * Math.sqrt(detSq(ax, ay, bx, by, cx, cy) + detSq(ax, az, bx, bz, cx, cz) + detSq(ay, az, by, bz, cy, cz)); return area; } // Given point B and segment AC, return the squared distance from B to the // nearest point on AC // Receive the squared length of 
// segments AB, BC, AC
//
// Given the squared lengths of the three sides of triangle ABC, return the
// squared distance from apex B to the nearest point on segment AC
// (clamped to the segment's endpoints).
function apexDistSq(ab2, bc2, ac2) {
  var dist2;
  if (ac2 === 0) {
    dist2 = ab2; // segment AC is degenerate (a single point)
  } else if (ab2 >= bc2 + ac2) {
    dist2 = bc2; // angle at C is obtuse: nearest point is endpoint C
  } else if (bc2 >= ab2 + ac2) {
    dist2 = ab2; // angle at A is obtuse: nearest point is endpoint A
  } else {
    // perpendicular (interior) case
    var dval = (ab2 + ac2 - bc2);
    dist2 = ab2 - dval * dval / ac2 * 0.25;
  }
  if (dist2 < 0) {
    dist2 = 0; // guard against negative values from fp rounding
  }
  return dist2;
}

// Squared distance from point (ax, ay) to segment (bx, by)-(cx, cy).
function pointSegDistSq(ax, ay, bx, by, cx, cy) {
  var ab2 = distanceSq(ax, ay, bx, by),
      ac2 = distanceSq(ax, ay, cx, cy),
      bc2 = distanceSq(bx, by, cx, cy);
  return apexDistSq(ab2, ac2, bc2);
}

// 3D version of pointSegDistSq(): squared distance from point a to segment b-c.
function pointSegDistSq3D(ax, ay, az, bx, by, bz, cx, cy, cz) {
  var ab2 = distanceSq3D(ax, ay, az, bx, by, bz),
      ac2 = distanceSq3D(ax, ay, az, cx, cy, cz),
      bc2 = distanceSq3D(bx, by, bz, cx, cy, cz);
  return apexDistSq(ab2, ac2, bc2);
}

// Compute the bounding box [xmin, ymin, xmax, ymax] of a run of coordinates.
// @start/@len select a slice of the xx/yy arrays (whole arrays by default).
// NOTE(review): the main loop below was damaged during text extraction
// ("for (i++; i xmax)") — the loop condition/body between "i" and "xmax" is
// missing and must be restored from the upstream mapshaper source; the
// surviving tokens are kept verbatim.
MapShaper.calcArcBounds = function(xx, yy, start, len) {
  var i = start | 0,
      n = isNaN(len) ? xx.length - i : len + i,
      x, y, xmin, ymin, xmax, ymax;
  if (n > 0) {
    // seed min/max with the first vertex
    xmin = xmax = xx[i];
    ymin = ymax = yy[i];
  }
  for (i++; i xmax) xmax = x;
  if (y < ymin) ymin = y;
  if (y > ymax) ymax = y;
  }
  return [xmin, ymin, xmax, ymax];
};

// Reverse @len elements of @arr in place, starting at index @start.
MapShaper.reversePathCoords = function(arr, start, len) {
  var i = start,
      j = start + len - 1,
      tmp;
  while (i < j) {
    tmp = arr[i];
    arr[i] = arr[j];
    arr[j] = tmp;
    i++;
    j--;
  }
};

// merge B into A
// Bounds are flat [xmin, ymin, xmax, ymax] arrays; A is widened in place.
function mergeBounds(a, b) {
  if (b[0] < a[0]) a[0] = b[0];
  if (b[1] < a[1]) a[1] = b[1];
  if (b[2] > a[2]) a[2] = b[2];
  if (b[3] > a[3]) a[3] = b[3];
}

// Return true if bounds @a completely contains bounds @b (edges may touch).
function containsBounds(a, b) {
  return a[0] <= b[0] && a[2] >= b[2] && a[1] <= b[1] && a[3] >= b[3];
}

// Area (width * height) of a bounds array.
function boundsArea(b) {
  return (b[2] - b[0]) * (b[3] - b[1]);
}

// export functions so they can be tested
var geom = {
  R: R,
  D2R: D2R,
  degreesToMeters: degreesToMeters,
  getRoundingFunction: getRoundingFunction,
  segmentHit: segmentHit,
  segmentIntersection: segmentIntersection,
  distanceSq: distanceSq,
  distance3D: distance3D,
  innerAngle: innerAngle,
  innerAngle2: innerAngle2,
  signedAngle: signedAngle,
  bearing: bearing,
  signedAngleSph: signedAngleSph,
  standardAngle: standardAngle,
  convLngLatToSph: convLngLatToSph,
  lngLatToXYZ: lngLatToXYZ,
  xyzToLngLat: xyzToLngLat,
  sphericalDistance: sphericalDistance,
  greatCircleDistance: greatCircleDistance,
  pointSegDistSq: pointSegDistSq,
  pointSegDistSq3D: pointSegDistSq3D,
  innerAngle3D: innerAngle3D,
  triangleArea: triangleArea,
  triangleArea3D: triangleArea3D,
  cosine: cosine,
  cosine3D: cosine3D
};

// Constructor takes arrays of coords: xx, yy, zz (optional)
//
// Iterate over the points of an arc
// properties: x, y
// method: hasNext()
// usage:
//   while (iter.hasNext()) {
//     iter.x, iter.y; // do something w/ x & y
//   }
//
function ArcIter(xx, yy) {
  // _i: index of the next vertex; _n: vertices remaining;
  // _inc: traversal direction (+1 forward, -1 reverse)
  this._i = 0;
  this._n = 0;
  this._inc = 1;
  this._xx = xx;
  this._yy = yy;
  // public cursor: index and coords of the most recently visited vertex
  this.i = 0;
  this.x = 0;
  this.y = 0;
}

// Prepare to iterate @len vertices starting at array offset @i;
// @fw selects forward (true) or reverse traversal. Returns this for chaining.
ArcIter.prototype.init = function(i, len, fw) {
  if (fw) {
    this._i = i;
    this._inc = 1;
  } else {
    this._i = i + len - 1;
    this._inc = -1;
  }
  this._n = len;
  return this;
};

// Advance to the next vertex, exposing it via this.x, this.y, this.i.
// Returns false once all @len vertices have been visited.
ArcIter.prototype.hasNext = function() {
  var i = this._i;
  if (this._n > 0) {
    this._i = i + this._inc;
    this.x = this._xx[i];
    this.y = this._yy[i];
    this.i = i;
    this._n--;
    return true;
  }
  return false;
};

// Like ArcIter, but skips vertices whose simplification threshold zz[i] falls
// below the active threshold (zlim). Arc endpoints are expected to carry
// z == Infinity (see the "Collapsed arc" check elsewhere in this file), so
// they are never skipped.
function FilteredArcIter(xx, yy, zz) {
  var _zlim = 0,
      _i = 0,
      _inc = 1,
      _stop = 0;

  // @i: start offset; @len: vertex count; @fw: forward/reverse; @zlim: threshold
  this.init = function(i, len, fw, zlim) {
    _zlim = zlim || 0;
    if (fw) {
      _i = i;
      _inc = 1;
      _stop = i + len; // one past the last vertex
    } else {
      _i = i + len - 1;
      _inc = -1;
      _stop = i - 1; // one before the first vertex
    }
    return this;
  };

  // Emit the vertex at _i, then scan ahead to the next vertex at or above
  // the threshold. Returns false when the scan cursor has reached _stop.
  this.hasNext = function() {
    // using local vars is significantly faster when skipping many points
    var zarr = zz,
        i = _i,
        j = i,
        zlim = _zlim,
        stop = _stop,
        inc = _inc;
    if (i == stop) return false;
    do {
      j += inc;
    } while (j != stop && zarr[j] < zlim);
    _i = j;
    this.x = xx[i];
    this.y = yy[i];
    this.i = i;
    return true;
  };
}

// Iterate along a path made up of one or more arcs.
// Similar interface to ArcIter() // function ShapeIter(arcs) { this._arcs = arcs; this._i = 0; this._n = 0; this.x = 0; this.y = 0; } ShapeIter.prototype.hasNext = function() { var arc = this._arc; if (this._i < this._n === false) { return false; } if (arc.hasNext()) { this.x = arc.x; this.y = arc.y; return true; } this.nextArc(); return this.hasNext(); }; ShapeIter.prototype.init = function(ids) { this._ids = ids; this._n = ids.length; this.reset(); return this; }; ShapeIter.prototype.nextArc = function() { var i = this._i + 1; if (i < this._n) { this._arc = this._arcs.getArcIter(this._ids[i]); if (i > 0) this._arc.hasNext(); // skip first point } this._i = i; }; ShapeIter.prototype.reset = function() { this._i = -1; this.nextArc(); }; // An interface for managing a collection of paths. // Constructor signatures: // // ArcCollection(arcs) // arcs is an array of polyline arcs; each arc is an array of points: [[x0, y0], [x1, y1], ... ] // // ArcCollection(nn, xx, yy) // nn is an array of arc lengths; xx, yy are arrays of concatenated coords; function ArcCollection() { var _xx, _yy, // coordinates data _ii, _nn, // indexes, sizes _zz, _zlimit = 0, // simplification _bb, _allBounds, // bounding boxes _arcIter, _filteredArcIter; // path iterators if (arguments.length == 1) { initLegacyArcs(arguments[0]); // want to phase this out } else if (arguments.length == 3) { initXYData.apply(this, arguments); } else { error("ArcCollection() Invalid arguments"); } function initLegacyArcs(arcs) { var xx = [], yy = []; var nn = arcs.map(function(points) { var n = points ? points.length : 0; for (var i=0; i 0) { j = i * 4; b = MapShaper.calcArcBounds(xx, yy, arcOffs, arcLen); bb[j++] = b[0]; bb[j++] = b[1]; bb[j++] = b[2]; bb[j] = b[3]; arcOffs += arcLen; bounds.mergeBounds(b); } } return { bb: bb, bounds: bounds }; } this.updateVertexData = function(nn, xx, yy, zz) { initXYData(nn, xx, yy); initZData(zz || null); }; // Give access to raw data arrays... 
this.getVertexData = function() { return { xx: _xx, yy: _yy, zz: _zz, bb: _bb, nn: _nn, ii: _ii }; }; this.getCopy = function() { var copy = new ArcCollection(new Int32Array(_nn), new Float64Array(_xx), new Float64Array(_yy)); if (_zz) copy.setThresholds(new Float64Array(_zz)); return copy; }; function getFilteredPointCount() { var zz = _zz, z = _zlimit; if (!zz || !z) return this.getPointCount(); var count = 0; for (var i=0, n = zz.length; i= z) count++; } return count; } function getFilteredVertexData() { var len2 = getFilteredPointCount(); var arcCount = _nn.length; var xx2 = new Float64Array(len2), yy2 = new Float64Array(len2), zz2 = new Float64Array(len2), nn2 = new Int32Array(arcCount), i=0, i2 = 0, n, n2; for (var arcId=0; arcId < arcCount; arcId++) { n2 = 0; n = _nn[arcId]; for (var end = i+n; i < end; i++) { if (_zz[i] >= _zlimit) { xx2[i2] = _xx[i]; yy2[i2] = _yy[i]; zz2[i2] = _zz[i]; i2++; n2++; } } if (n2 < 2) error("Collapsed arc"); // endpoints should be z == Infinity nn2[arcId] = n2; } return { xx: xx2, yy: yy2, zz: zz2, nn: nn2 }; } this.getFilteredCopy = function() { if (!_zz || _zlimit === 0) return this.getCopy(); var data = getFilteredVertexData(); var copy = new ArcCollection(data.nn, data.xx, data.yy); copy.setThresholds(data.zz); return copy; }; // Return arcs as arrays of [x, y] points (intended for testing). this.toArray = function() { var arr = []; this.forEach(function(iter) { var arc = []; while (iter.hasNext()) { arc.push([iter.x, iter.y]); } arr.push(arc); }); return arr; }; this.toString = function() { return JSON.stringify(this.toArray()); }; // @cb function(i, j, xx, yy) this.forEachArcSegment = function(arcId, cb) { var fw = arcId >= 0, absId = fw ? arcId : ~arcId, zlim = this.getRetainedInterval(), n = _nn[absId], step = fw ? 1 : -1, v1 = fw ? 
_ii[absId] : _ii[absId] + n - 1, v2 = v1, count = 0; for (var j = 1; j < n; j++) { v2 += step; if (zlim === 0 || _zz[v2] >= zlim) { cb(v1, v2, _xx, _yy); v1 = v2; count++; } } return count; }; // @cb function(i, j, xx, yy) this.forEachSegment = function(cb) { var count = 0; for (var i=0, n=this.size(); i -1) { copyElements(_xx, _ii[i], _xx, goodPoints, arcLen); copyElements(_yy, _ii[i], _yy, goodPoints, arcLen); if (_zz) copyElements(_zz, _ii[i], _zz, goodPoints, arcLen); _nn[k] = arcLen; goodPoints += arcLen; goodArcs++; } } initXYData(_nn.subarray(0, goodArcs), _xx.subarray(0, goodPoints), _yy.subarray(0, goodPoints)); if (_zz) initZData(_zz.subarray(0, goodPoints)); } this.dedupCoords = function() { var arcId = 0, i = 0, i2 = 0, arcCount = this.size(), zz = _zz, arcLen, arcLen2; while (arcId < arcCount) { arcLen = _nn[arcId]; arcLen2 = MapShaper.dedupArcCoords(i, i2, arcLen, _xx, _yy, zz); _nn[arcId] = arcLen2; i += arcLen; i2 += arcLen2; arcId++; } if (i > i2) { initXYData(_nn, _xx.subarray(0, i2), _yy.subarray(0, i2)); if (zz) initZData(zz.subarray(0, i2)); } return i - i2; }; this.getVertex = function(arcId, nth) { var i = this.indexOfVertex(arcId, nth); return { x: _xx[i], y: _yy[i] }; }; this.indexOfVertex = function(arcId, nth) { var absId = arcId < 0 ? ~arcId : arcId, len = _nn[absId]; if (nth < 0) nth = len + nth; if (absId != arcId) nth = len - nth - 1; if (nth < 0 || nth >= len) error("[ArcCollection] out-of-range vertex id"); return _ii[absId] + nth; }; // Test whether the vertex at index @idx is the endpoint of an arc this.pointIsEndpoint = function(idx) { var ii = _ii, nn = _nn; for (var j=0, n=ii.length; j 0) { if (x != iter.x || y != iter.y) return false; } x = iter.x; y = iter.y; i++; } return true; }; this.getArcLength = function(arcId) { return _nn[absArcId(arcId)]; }; this.getArcIter = function(arcId) { var fw = arcId >= 0, i = fw ? arcId : ~arcId, iter = _zz && _zlimit ? 
_filteredArcIter : _arcIter; if (i >= _nn.length) { error("#getArcId() out-of-range arc id:", arcId); } return iter.init(_ii[i], _nn[i], fw, _zlimit); }; this.getShapeIter = function(ids) { return new ShapeIter(this).init(ids); }; // Add simplification data to the dataset // @thresholds is either a single typed array or an array of arrays of removal thresholds for each arc; // this.setThresholds = function(thresholds) { var n = this.getPointCount(), zz = null; if (!thresholds) { // nop } else if (thresholds.length == n) { zz = thresholds; } else if (thresholds.length == this.size()) { zz = flattenThresholds(thresholds, n); } else { error("Invalid threshold data"); } initZData(zz); return this; }; function flattenThresholds(arr, n) { var zz = new Float64Array(n), i = 0; arr.forEach(function(arr) { for (var j=0, n=arr.length; j 0) { var data = getFilteredVertexData(); this.updateVertexData(data.nn, data.xx, data.yy); _zlimit = 0; } else { _zz = null; } }; this.getRetainedInterval = function() { return _zlimit; }; this.setRetainedInterval = function(z) { _zlimit = z; return this; }; this.getRetainedPct = function() { return this.getPctByThreshold(_zlimit); }; this.setRetainedPct = function(pct) { if (pct >= 1) { _zlimit = 0; } else { _zlimit = this.getThresholdByPct(pct); _zlimit = MapShaper.clampIntervalByPct(_zlimit, pct); } return this; }; // Return array of z-values that can be removed for simplification // this.getRemovableThresholds = function(nth) { if (!_zz) error("[arcs] Missing simplification data."); var skip = nth | 1, arr = new Float64Array(Math.ceil(_zz.length / skip)), z; for (var i=0, j=0, n=this.getPointCount(); i= 0 && arcId < this.size())) { error("[arcs] Invalid arc id:", arcId); } var start = _ii[arcId], end = start + _nn[arcId]; return _zz.subarray(start, end); }; this.getPctByThreshold = function(val) { var arr, rank, pct; if (val > 0) { arr = this.getRemovableThresholds(); rank = utils.findRankByValue(arr, val); pct = arr.length > 0 ? 
1 - (rank - 1) / arr.length : 1; } else { pct = 1; } return pct; }; this.getThresholdByPct = function(pct) { var tmp = this.getRemovableThresholds(), rank, z; if (tmp.length === 0) { // No removable points rank = 0; } else { rank = Math.floor((1 - pct) * (tmp.length + 2)); } if (rank <= 0) { z = 0; } else if (rank > tmp.length) { z = Infinity; } else { z = utils.findValueByRank(tmp, rank); } return z; }; this.arcIntersectsBBox = function(i, b1) { var b2 = _bb, j = i * 4; return b2[j] <= b1[2] && b2[j+2] >= b1[0] && b2[j+3] >= b1[1] && b2[j+1] <= b1[3]; }; this.arcIsContained = function(i, b1) { var b2 = _bb, j = i * 4; return b2[j] >= b1[0] && b2[j+2] <= b1[2] && b2[j+1] >= b1[1] && b2[j+3] <= b1[3]; }; this.arcIsSmaller = function(i, units) { var bb = _bb, j = i * 4; return bb[j+2] - bb[j] < units && bb[j+3] - bb[j+1] < units; }; // TODO: allow datasets in lat-lng coord range to be flagged as planar this.isPlanar = function() { return !MapShaper.probablyDecimalDegreeBounds(this.getBounds()); }; this.size = function() { return _ii && _ii.length || 0; }; this.getPointCount = function() { return _xx && _xx.length || 0; }; this.getBounds = function() { return _allBounds; }; this.getSimpleShapeBounds = function(arcIds, bounds) { bounds = bounds || new Bounds(); for (var i=0, n=arcIds.length; i bbox[2]) bbox[2] = bb[id]; if (bb[++id] > bbox[3]) bbox[3] = bb[id]; } return bbox; }; this.getMultiShapeBounds = function(shapeIds, bounds) { bounds = bounds || new Bounds(); if (shapeIds) { // handle null shapes for (var i=0, n=shapeIds.length; i 0 && (keep || zz[j] > zz[i])) { zz[i] = zz[j]; } n++; } return n2 > 1 ? n2 : 0; }; MapShaper.countPointsInLayer = function(lyr) { var count = 0; if (MapShaper.layerHasPoints(lyr)) { MapShaper.forEachPoint(lyr.shapes, function() {count++;}); } return count; }; MapShaper.forEachPoint = function(shapes, cb) { shapes.forEach(function(shape, id) { var n = shape ? 
shape.length : 0; for (var i=0; i= 1) z = 0; return z; }; MapShaper.findNextRemovableVertices = function(zz, zlim, start, end) { var i = MapShaper.findNextRemovableVertex(zz, zlim, start, end), arr, k; if (i > -1) { k = zz[i]; arr = [i]; while (++i < end) { if (zz[i] == k) { arr.push(i); } } } return arr || null; }; // Return id of the vertex between @start and @end with the highest // threshold that is less than @zlim, or -1 if none // MapShaper.findNextRemovableVertex = function(zz, zlim, start, end) { var tmp, jz = 0, j = -1, z; if (start > end) { tmp = start; start = end; end = tmp; } for (var i=start+1; i jz) { j = i; jz = z; } } return j; }; // Visit each arc id in a shape (array of array of arc ids) // Use non-undefined return values of callback @cb as replacements. MapShaper.forEachArcId = function(arr, cb) { var item; for (var i=0; i 0) { return paths.filter(function(ids) {return !!ids;}); } else { return paths; } }; MapShaper.forEachPathSegment = function(shape, arcs, cb) { MapShaper.forEachArcId(shape, function(arcId) { arcs.forEachArcSegment(arcId, cb); }); }; MapShaper.traversePaths = function traversePaths(shapes, cbArc, cbPart, cbShape) { var segId = 0; shapes.forEach(function(parts, shapeId) { if (!parts || parts.length === 0) return; // null shape var arcIds, arcId; if (cbShape) { cbShape(shapeId); } for (var i=0, m=parts.length; i 0) shape2.push(path); }); return shape2.length > 0 ? shape2 : null; }; // Bundle holes with their containing rings for Topo/GeoJSON polygon export. // Assumes outer rings are CW and inner (hole) rings are CCW. // @paths array of objects with path metadata -- see MapShaper.exportPathData() // // TODO: Improve reliability. Currently uses winding order, area and bbox to // identify holes and their enclosures -- could be confused by strange // geometry. 
// MapShaper.groupPolygonRings = function(paths) { var pos = [], neg = []; if (paths) { paths.forEach(function(path) { if (path.area > 0) { pos.push(path); } else if (path.area < 0) { neg.push(path); } else { // verbose("Zero-area ring, skipping"); } }); } var output = pos.map(function(part) { return [part]; }); neg.forEach(function(hole) { var containerId = -1, containerArea = 0; for (var i=0, n=pos.length; i -hole.area; if (contained && (containerArea === 0 || part.area < containerArea)) { containerArea = part.area; containerId = i; } } if (containerId == -1) { verbose("[groupPolygonRings()] polygon hole is missing a containing ring, dropping."); } else { output[containerId].push(hole); } }); return output; }; MapShaper.getPathMetadata = function(shape, arcs, type) { return (shape || []).map(function(ids) { if (!utils.isArray(ids)) throw new Error("expected array"); return { ids: ids, area: type == 'polygon' ? geom.getPlanarPathArea(ids, arcs) : 0, bounds: arcs.getSimpleShapeBounds(ids) }; }); }; MapShaper.quantizeArcs = function(arcs, quanta) { // Snap coordinates to a grid of @quanta locations on both axes // This may snap nearby points to the same coordinates. // Consider a cleanup pass to remove dupes, make sure collapsed arcs are // removed on export. 
// var bb1 = arcs.getBounds(), bb2 = new Bounds(0, 0, quanta-1, quanta-1), fw = bb1.getTransform(bb2), inv = fw.invert(); arcs.transformPoints(function(x, y) { var p = fw.transform(x, y); return inv.transform(Math.round(p[0]), Math.round(p[1])); }); }; // utility functions for datasets and layers // clone all layers, make a filtered copy of arcs MapShaper.copyDataset = function(dataset) { var d2 = utils.extend({}, dataset); d2.layers = d2.layers.map(MapShaper.copyLayer); if (d2.arcs) { d2.arcs = d2.arcs.getFilteredCopy(); } return d2; }; // clone coordinate data, shallow-copy attribute data MapShaper.copyDatasetForExport = function(dataset) { var d2 = utils.extend({}, dataset); d2.layers = d2.layers.map(MapShaper.copyLayerShapes); if (d2.arcs) { d2.arcs = d2.arcs.getFilteredCopy(); } return d2; }; // make a stub copy if the no_replace option is given, else pass thru src layer MapShaper.getOutputLayer = function(src, opts) { return opts && opts.no_replace ? {geometry_type: src.geometry_type} : src; }; // Make a deep copy of a layer MapShaper.copyLayer = function(lyr) { var copy = MapShaper.copyLayerShapes(lyr); if (copy.data) { copy.data = copy.data.clone(); } return copy; }; MapShaper.copyLayerShapes = function(lyr) { var copy = utils.extend({}, lyr); if (lyr.shapes) { copy.shapes = MapShaper.cloneShapes(lyr.shapes); } return copy; }; MapShaper.getDatasetBounds = function(data) { var bounds = new Bounds(); data.layers.forEach(function(lyr) { var lyrbb = MapShaper.getLayerBounds(lyr, data.arcs); if (lyrbb) bounds.mergeBounds(lyrbb); }); return bounds; }; MapShaper.datasetHasPaths = function(dataset) { return utils.some(dataset.layers, function(lyr) { return MapShaper.layerHasPaths(lyr); }); }; MapShaper.countMultiPartFeatures = function(shapes) { var count = 0; for (var i=0, n=shapes.length; i 1) count++; } return count; }; MapShaper.getFeatureCount = function(lyr) { var count = 0; if (lyr.data) { count = lyr.data.size(); } else if (lyr.shapes) { count = 
lyr.shapes.length; } return count; }; MapShaper.getLayerBounds = function(lyr, arcs) { var bounds = null; if (lyr.geometry_type == 'point') { bounds = new Bounds(); MapShaper.forEachPoint(lyr.shapes, function(p) { bounds.mergePoint(p[0], p[1]); }); } else if (lyr.geometry_type == 'polygon' || lyr.geometry_type == 'polyline') { bounds = MapShaper.getPathBounds(lyr.shapes, arcs); } else { // just return null if layer has no bounds // error("Layer is missing a valid geometry type"); } return bounds; }; MapShaper.getPathBounds = function(shapes, arcs) { var bounds = new Bounds(); MapShaper.forEachArcId(shapes, function(id) { arcs.mergeArcBounds(id, bounds); }); return bounds; }; // replace cut layers in-sequence (to maintain layer indexes) // append any additional new layers MapShaper.replaceLayers = function(dataset, cutLayers, newLayers) { // modify a copy in case cutLayers == dataset.layers var currLayers = dataset.layers.concat(); utils.repeat(Math.max(cutLayers.length, newLayers.length), function(i) { var cutLyr = cutLayers[i], newLyr = newLayers[i], idx = cutLyr ? currLayers.indexOf(cutLyr) : currLayers.length; if (cutLyr) { currLayers.splice(idx, 1); } if (newLyr) { currLayers.splice(idx, 0, newLyr); } }); dataset.layers = currLayers; }; MapShaper.isolateLayer = function(layer, dataset) { return utils.defaults({ layers: dataset.layers.filter(function(lyr) {return lyr == layer;}) }, dataset); }; // @target is a layer identifier or a comma-sep. 
list of identifiers // an identifier is a literal name, a name containing "*" wildcard or // a 0-based array index MapShaper.findMatchingLayers = function(layers, target) { var ii = []; String(target).split(',').forEach(function(id) { var i = Number(id), rxp = utils.wildcardToRegExp(id); if (utils.isInteger(i)) { ii.push(i); // TODO: handle out-of-range index } else { layers.forEach(function(lyr, i) { if (rxp.test(lyr.name)) ii.push(i); }); } }); ii = utils.uniq(ii); // remove dupes return ii.map(function(i) { return layers[i]; }); }; // Transform the points in a dataset in-place; don't clean up corrupted shapes MapShaper.transformPoints = function(dataset, f) { if (dataset.arcs) { dataset.arcs.transformPoints(f); } dataset.layers.forEach(function(lyr) { if (MapShaper.layerHasPoints(lyr)) { MapShaper.transformPointsInLayer(lyr, f); } }); }; MapShaper.initDataTable = function(lyr) { lyr.data = new DataTable(MapShaper.getFeatureCount(lyr)); }; // Return average segment length (with simplification) MapShaper.getAvgSegment = function(arcs) { var sum = 0; var count = arcs.forEachSegment(function(i, j, xx, yy) { var dx = xx[i] - xx[j], dy = yy[i] - yy[j]; sum += Math.sqrt(dx * dx + dy * dy); }); return sum / count || 0; }; // Return average magnitudes of dx, dy (with simplification) MapShaper.getAvgSegment2 = function(arcs) { var dx = 0, dy = 0; var count = arcs.forEachSegment(function(i, j, xx, yy) { dx += Math.abs(xx[i] - xx[j]); dy += Math.abs(yy[i] - yy[j]); }); return [dx / count || 0, dy / count || 0]; }; // Return average magnitudes of dx, dy (with simplification) /* this.getAvgSegmentSph2 = function() { var sumx = 0, sumy = 0; var count = this.forEachSegment(function(i, j, xx, yy) { var lat1 = yy[i], lat2 = yy[j]; sumy += geom.degreesToMeters(Math.abs(lat1 - lat2)); sumx += geom.degreesToMeters(Math.abs(xx[i] - xx[j]) * Math.cos((lat1 + lat2) * 0.5 * geom.D2R); }); return [sumx / count || 0, sumy / count || 0]; }; */ // @xx array of x coords // @ids an array of 
segment endpoint ids [a0, b0, a1, b1, ...] // Sort @ids in place so that xx[a(n)] <= xx[b(n)] and xx[a(n)] <= xx[a(n+1)] MapShaper.sortSegmentIds = function(xx, ids) { MapShaper.orderSegmentIds(xx, ids); MapShaper.quicksortSegmentIds(xx, ids, 0, ids.length-2); }; MapShaper.orderSegmentIds = function(xx, ids, spherical) { function swap(i, j) { var tmp = ids[i]; ids[i] = ids[j]; ids[j] = tmp; } for (var i=0, n=ids.length; i xx[ids[i+1]]) { swap(i, i+1); } } }; MapShaper.insertionSortSegmentIds = function(arr, ids, start, end) { var id, id2; for (var j = start + 2; j <= end; j+=2) { id = ids[j]; id2 = ids[j+1]; for (var i = j - 2; i >= start && arr[id] < arr[ids[i]]; i-=2) { ids[i+2] = ids[i]; ids[i+3] = ids[i+1]; } ids[i+2] = id; ids[i+3] = id2; } }; MapShaper.quicksortSegmentIds = function (a, ids, lo, hi) { var i = lo, j = hi, pivot, tmp; while (i < hi) { pivot = a[ids[(lo + hi >> 2) << 1]]; // avoid n^2 performance on sorted arrays while (i <= j) { while (a[ids[i]] < pivot) i+=2; while (a[ids[j]] > pivot) j-=2; if (i <= j) { tmp = ids[i]; ids[i] = ids[j]; ids[j] = tmp; tmp = ids[i+1]; ids[i+1] = ids[j+1]; ids[j+1] = tmp; i+=2; j-=2; } } if (j - lo < 40) MapShaper.insertionSortSegmentIds(a, ids, lo, j); else MapShaper.quicksortSegmentIds(a, ids, lo, j); if (hi - i < 40) { MapShaper.insertionSortSegmentIds(a, ids, i, hi); return; } lo = i; j = hi; } }; // Convert an array of intersections into an ArcCollection (for display) // MapShaper.getIntersectionPoints = function(intersections) { return intersections.map(function(obj) { return [obj.x, obj.y]; }); }; // Identify intersecting segments in an ArcCollection // // To find all intersections: // 1. Assign each segment to one or more horizontal stripes/bins // 2. Find intersections inside each stripe // 3. Concat and dedup // MapShaper.findSegmentIntersections = (function() { // Re-use buffer for temp data -- Chrome's gc starts bogging down // if large buffers are repeatedly created. 
var buf; function getUint32Array(count) { var bytes = count * 4; if (!buf || buf.byteLength < bytes) { buf = new ArrayBuffer(bytes); } return new Uint32Array(buf, 0, count); } return function(arcs) { var bounds = arcs.getBounds(), // TODO: handle spherical bounds spherical = !arcs.isPlanar() && containsBounds(MapShaper.getWorldBounds(), bounds.toArray()), ymin = bounds.ymin, yrange = bounds.ymax - ymin, stripeCount = MapShaper.calcSegmentIntersectionStripeCount(arcs), stripeSizes = new Uint32Array(stripeCount), stripeId = stripeCount > 1 ? multiStripeId : singleStripeId, i; function multiStripeId(y) { return Math.floor((stripeCount-1) * (y - ymin) / yrange); } function singleStripeId(y) {return 0;} // Count segments in each stripe arcs.forEachSegment(function(id1, id2, xx, yy) { var s1 = stripeId(yy[id1]), s2 = stripeId(yy[id2]); while (true) { stripeSizes[s1] = stripeSizes[s1] + 2; if (s1 == s2) break; s1 += s2 > s1 ? 1 : -1; } }); // Allocate arrays for segments in each stripe var stripeData = getUint32Array(utils.sum(stripeSizes)), offs = 0; var stripes = []; utils.forEach(stripeSizes, function(stripeSize) { var start = offs; offs += stripeSize; stripes.push(stripeData.subarray(start, offs)); }); // Assign segment ids to each stripe utils.initializeArray(stripeSizes, 0); arcs.forEachSegment(function(id1, id2, xx, yy) { var s1 = stripeId(yy[id1]), s2 = stripeId(yy[id2]), count, stripe; while (true) { count = stripeSizes[s1]; stripeSizes[s1] = count + 2; stripe = stripes[s1]; stripe[count] = id1; stripe[count+1] = id2; if (s1 == s2) break; s1 += s2 > s1 ? 1 : -1; } }); // Detect intersections among segments in each stripe. 
var raw = arcs.getVertexData(), intersections = [], arr; for (i=0; i 0) { intersections.push.apply(intersections, arr); } } return MapShaper.dedupIntersections(intersections); }; })(); MapShaper.sortIntersections = function(arr) { arr.sort(function(a, b) { return a.x - b.x || a.y - b.y; }); }; MapShaper.dedupIntersections = function(arr) { var index = {}; return arr.filter(function(o) { var key = MapShaper.getIntersectionKey(o); if (key in index) { return false; } index[key] = true; return true; }); }; // Get an indexable key from an intersection object // Assumes that vertex ids of o.a and o.b are sorted MapShaper.getIntersectionKey = function(o) { return o.a.join(',') + ';' + o.b.join(','); }; MapShaper.calcSegmentIntersectionStripeCount = function(arcs) { var yrange = arcs.getBounds().height(), segLen = MapShaper.getAvgSegment2(arcs)[1], count = 1; if (segLen > 0 && yrange > 0) { count = Math.ceil(yrange / segLen / 20); } return count || 1; }; // Find intersections among a group of line segments // // TODO: handle case where a segment starts and ends at the same point (i.e. duplicate coords); // // @ids: Array of indexes: [s0p0, s0p1, s1p0, s1p1, ...] where xx[sip0] <= xx[sip1] // @xx, @yy: Arrays of x- and y-coordinates // MapShaper.intersectSegments = function(ids, xx, yy, spherical) { var lim = ids.length - 2, intersections = []; var s1p1, s1p2, s2p1, s2p2, s1p1x, s1p2x, s2p1x, s2p2x, s1p1y, s1p2y, s2p1y, s2p2y, hit, seg1, seg2, i, j; // Sort segments by xmin, to allow efficient exclusion of segments with // non-overlapping x extents. 
MapShaper.sortSegmentIds(xx, ids); // sort by ascending xmin i = 0; while (i < lim) { s1p1 = ids[i]; s1p2 = ids[i+1]; s1p1x = xx[s1p1]; s1p2x = xx[s1p2]; s1p1y = yy[s1p1]; s1p2y = yy[s1p2]; // count++; j = i; while (j < lim) { j += 2; s2p1 = ids[j]; s2p1x = xx[s2p1]; if (s1p2x < s2p1x) break; // x extent of seg 2 is greater than seg 1: done with seg 1 //if (s1p2x <= s2p1x) break; // this misses point-segment intersections when s1 or s2 is vertical s2p1y = yy[s2p1]; s2p2 = ids[j+1]; s2p2x = xx[s2p2]; s2p2y = yy[s2p2]; // skip segments with non-overlapping y ranges if (s1p1y >= s2p1y) { if (s1p1y > s2p2y && s1p2y > s2p1y && s1p2y > s2p2y) continue; } else { if (s1p1y < s2p2y && s1p2y < s2p1y && s1p2y < s2p2y) continue; } // skip segments that are adjacent in a path (optimization) // TODO: consider if this eliminates some cases that should // be detected, e.g. spikes formed by unequal segments if (s1p1 == s2p1 || s1p1 == s2p2 || s1p2 == s2p1 || s1p2 == s2p2) { continue; } // test two candidate segments for intersection hit = segmentIntersection(s1p1x, s1p1y, s1p2x, s1p2y, s2p1x, s2p1y, s2p2x, s2p2y); if (hit) { seg1 = [s1p1, s1p2]; seg2 = [s2p1, s2p2]; intersections.push(MapShaper.formatIntersection(hit, seg1, seg2, xx, yy)); if (hit.length == 4) { intersections.push(MapShaper.formatIntersection(hit.slice(2), seg1, seg2, xx, yy)); } } } i += 2; } return intersections; // @p is an [x, y] location along a segment defined by ids @id1 and @id2 // return array [i, j] where i and j are the same endpoint ids with i <= j // if @p coincides with an endpoint, return the id of that endpoint twice function getEndpointIds(id1, id2, p) { var i = id1 < id2 ? id1 : id2, j = i === id1 ? 
id2 : id1; if (xx[i] == p[0] && yy[i] == p[1]) { j = i; } else if (xx[j] == p[0] && yy[j] == p[1]) { i = j; } return [i, j]; } }; MapShaper.formatIntersection = function(xy, s1, s2, xx, yy) { var x = xy[0], y = xy[1], a, b; s1 = MapShaper.formatIntersectingSegment(x, y, s1[0], s1[1], xx, yy); s2 = MapShaper.formatIntersectingSegment(x, y, s2[0], s2[1], xx, yy); a = s1[0] < s2[0] ? s1 : s2; b = a == s1 ? s2 : s1; return {x: x, y: y, a: a, b: b}; }; MapShaper.formatIntersectingSegment = function(x, y, id1, id2, xx, yy) { var i = id1 < id2 ? id1 : id2, j = i === id1 ? id2 : id1; if (xx[i] == x && yy[i] == y) { j = i; } else if (xx[j] == x && yy[j] == y) { i = j; } return [i, j]; }; // Calculations for planar geometry of shapes // TODO: consider 3D versions of some of these geom.getPlanarShapeArea = function(shp, arcs) { return (shp || []).reduce(function(area, ids) { return area + geom.getPlanarPathArea(ids, arcs); }, 0); }; geom.getSphericalShapeArea = function(shp, arcs) { if (arcs.isPlanar()) { error("[getSphericalShapeArea()] Function requires decimal degree coordinates"); } return (shp || []).reduce(function(area, ids) { return area + geom.getSphericalPathArea(ids, arcs); }, 0); }; // Return path with the largest (area) bounding box // @shp array of array of arc ids // @arcs ArcCollection geom.getMaxPath = function(shp, arcs) { var maxArea = 0; return (shp || []).reduce(function(maxPath, path) { var bbArea = arcs.getSimpleShapeBounds(path).area(); if (bbArea > maxArea) { maxArea = bbArea; maxPath = path; } return maxPath; }, null); }; // @ids array of arc ids // @arcs ArcCollection geom.getAvgPathXY = function(ids, arcs) { var iter = arcs.getShapeIter(ids); if (!iter.hasNext()) return null; var x0 = iter.x, y0 = iter.y, count = 0, sumX = 0, sumY = 0; while (iter.hasNext()) { count++; sumX += iter.x; sumY += iter.y; } if (count === 0 || iter.x !== x0 || iter.y !== y0) { sumX += x0; sumY += y0; count++; } return { x: sumX / count, y: sumY / count }; }; // Return 
true if point is inside or on boundary of a shape // geom.testPointInPolygon = function(x, y, shp, arcs) { var isIn = false, isOn = false; if (shp) { shp.forEach(function(ids) { var inRing = geom.testPointInRing(x, y, ids, arcs); if (inRing == 1) { isIn = !isIn; } else if (inRing == -1) { isOn = true; } }); } return isOn || isIn; }; geom.getPointToPathDistance = function(px, py, ids, arcs) { var iter = arcs.getShapeIter(ids); if (!iter.hasNext()) return Infinity; var ax = iter.x, ay = iter.y, paSq = distanceSq(px, py, ax, ay), pPathSq = paSq, pbSq, abSq, bx, by; while (iter.hasNext()) { bx = iter.x; by = iter.y; pbSq = distanceSq(px, py, bx, by); abSq = distanceSq(ax, ay, bx, by); pPathSq = Math.min(pPathSq, apexDistSq(paSq, pbSq, abSq)); ax = bx; ay = by; paSq = pbSq; } return Math.sqrt(pPathSq); }; geom.getYIntercept = function(x, ax, ay, bx, by) { return ay + (x - ax) * (by - ay) / (bx - ax); }; geom.getXIntercept = function(y, ax, ay, bx, by) { return ax + (y - ay) * (bx - ax) / (by - ay); }; // Return unsigned distance of a point to a shape // geom.getPointToShapeDistance = function(x, y, shp, arcs) { var minDist = (shp || []).reduce(function(minDist, ids) { var pathDist = geom.getPointToPathDistance(x, y, ids, arcs); return Math.min(minDist, pathDist); }, Infinity); return minDist; }; // Test if point (x, y) is inside, outside or on the boundary of a polygon ring // Return 0: outside; 1: inside; -1: on boundary // geom.testPointInRing = function(x, y, ids, arcs) { /* // arcs.getSimpleShapeBounds() doesn't apply simplification, can't use here //// wait, why not? simplifcation shoudn't expand bounds, so this test makes sense if (!arcs.getSimpleShapeBounds(ids).containsPoint(x, y)) { return false; } */ var isIn = false, isOn = false; MapShaper.forEachPathSegment(ids, arcs, function(a, b, xx, yy) { var result = geom.testRayIntersection(x, y, xx[a], yy[a], xx[b], yy[b]); if (result == 1) { isIn = !isIn; } else if (isNaN(result)) { isOn = true; } }); return isOn ? 
-1 : (isIn ? 1 : 0); };
// test if a vertical ray originating at (x, y) intersects a segment
// returns 1 if intersection, 0 if no intersection, NaN if point touches segment
// (Special rules apply to endpoint intersections, to support point-in-polygon testing.)
geom.testRayIntersection = function(x, y, ax, ay, bx, by) {
  var val = geom.getRayIntersection(x, y, ax, ay, bx, by);
  // NaN is the only value not equal to itself -- this is an inline isNaN() check
  if (val != val) {
    return NaN;
  }
  return val == -Infinity ? 0 : 1;
};
// Returns the y-coordinate where an upward vertical ray from (x, y) crosses
// segment (ax, ay)-(bx, by); -Infinity signals no hit, NaN signals that the
// point lies on the segment itself.
geom.getRayIntersection = function(x, y, ax, ay, bx, by) {
  var hit = -Infinity, // default: no hit
      yInt;
  // case: p is entirely above, left or right of segment
  if (x < ax && x < bx || x > ax && x > bx || y > ay && y > by) {
    // no intersection
  }
  // case: px aligned with a segment vertex
  else if (x === ax || x === bx) {
    // case: vertical segment or collapsed segment
    if (x === ax && x === bx) {
      // p is on segment
      if (y == ay || y == by || y > ay != y > by) {
        hit = NaN;
      }
      // else: no hit
    }
    // case: px equal to ax (only)
    else if (x === ax) {
      if (y === ay) {
        hit = NaN;
      } else if (bx < ax && y < ay) {
        // only score hit if px aligned to rightmost endpoint
        hit = ay;
      }
    }
    // case: px equal to bx (only)
    else {
      if (y === by) {
        hit = NaN;
      } else if (ax < bx && y < by) {
        // only score hit if px aligned to rightmost endpoint
        hit = by;
      }
    }
  // case: px is between endpoints
  } else {
    yInt = geom.getYIntercept(x, ax, ay, bx, by);
    if (yInt > y) {
      hit = yInt;
    } else if (yInt == y) {
      hit = NaN;
    }
  }
  return hit;
};
// Signed area of a path (given as arc ids) treating coordinates as lon/lat
// degrees. NOTE(review): 6378137 is presumably the WGS84 equatorial earth
// radius in meters, making the result square meters -- confirm upstream.
geom.getSphericalPathArea = function(ids, arcs) {
  var iter = arcs.getShapeIter(ids),
      sum = 0,
      started = false,
      deg2rad = Math.PI / 180,
      x, y, xp, yp;
  while (iter.hasNext()) {
    x = iter.x * deg2rad;
    y = Math.sin(iter.y * deg2rad);
    if (started) {
      sum += (x - xp) * (2 + y + yp);
    } else {
      started = true;
    }
    xp = x;
    yp = y;
  }
  return sum / 2 * 6378137 * 6378137;
};
// Get path area from an array of [x, y] points
// TODO: consider removing duplication with getPathArea(), e.g. by
// wrapping points in an iterator.
// geom.getPlanarPathArea2 = function(points) { var sum = 0, ax, ay, bx, by, dx, dy, p; for (var i=0, n=points.length; i 0) { count += countCrosses(x, y, bucketId - 1); } count += countCrosses(x, y, bucketCount); // check oflo bucket if (isNaN(count)) return -1; return count % 2 == 1 ? 1 : 0; }; function countCrosses(x, y, bucketId) { var offs = bucketOffsets[bucketId], count = 0, xx = data.xx, yy = data.yy, n, a, b; if (bucketId == bucketCount) { // oflo bucket n = xminIds.length - offs; } else { n = bucketOffsets[bucketId + 1] - offs; } for (var i=0; i= bucketCount) i = bucketCount - 1; return i; } function getBucketCount(segCount) { // default is 100 segs per bucket (average) var buckets = opts && opts.buckets > 0 ? opts.buckets : segCount / 100; return Math.ceil(buckets); } function init() { var xx = data.xx, segCount = 0, segId = 0, bucketId = -1, prevBucketId, segments, head, tail, a, b, i, j, xmin, xmax; // get array of segments as [s0p0, s0p1, s1p0, s1p1, ...], sorted by xmin coordinate MapShaper.forEachPathSegment(shape, arcs, function() { segCount++; }); segments = new Uint32Array(segCount * 2); i = 0; MapShaper.forEachPathSegment(shape, arcs, function(a, b, xx, yy) { segments[i++] = a; segments[i++] = b; }); MapShaper.sortSegmentIds(xx, segments); // assign segments to buckets according to xmin coordinate xminIds = new Uint32Array(segCount); xmaxIds = new Uint32Array(segCount); bucketCount = getBucketCount(segCount); bucketOffsets = new Uint32Array(bucketCount + 1); // add an oflo bucket boundsLeft = xx[segments[0]]; // xmin of first segment bucketWidth = (xx[segments[segments.length - 2]] - boundsLeft) / bucketCount; head = 0; // insertion index for next segment in the current bucket tail = segCount - 1; // insertion index for next segment in oflo bucket while (segId < segCount) { j = segId * 2; a = segments[j]; b = segments[j+1]; xmin = xx[a]; xmax = xx[b]; prevBucketId = bucketId; bucketId = getBucketId(xmin); while (bucketId > prevBucketId) { 
prevBucketId++; bucketOffsets[prevBucketId] = head; } if (xmax - xmin >= 0 === false) error("Invalid segment"); if (getBucketId(xmax) - bucketId > 1) { // if segment extends to more than two buckets, put it in the oflo bucket xminIds[tail] = a; xmaxIds[tail] = b; tail--; // oflo bucket fills from right to left } else { // else place segment in a bucket based on x coord of leftmost endpoint xminIds[head] = a; xmaxIds[head] = b; head++; } segId++; } bucketOffsets[bucketCount] = head; if (head != tail + 1) error("Segment indexing error"); } } function PathIndex(shapes, arcs) { var _index; // var totalArea = arcs.getBounds().area(); var totalArea = MapShaper.getPathBounds(shapes, arcs).area(); init(shapes); function init(shapes) { var boxes = []; shapes.forEach(function(shp, shpId) { var n = shp ? shp.length : 0; for (var i=0; i totalArea * 0.02) { bbox.index = new PolygonIndex([ids], arcs); } } } this.findEnclosingShape = function(p) { var shpId = -1; var shapes = findPointHitShapes(p); shapes.forEach(function(paths) { if (testPointInRings(p, paths)) { shpId = paths[0].id; } }); return shpId; }; this.pointIsEnclosed = function(p) { return testPointInRings(p, findPointHitRings(p)); }; this.arcIsEnclosed = function(arcId) { return this.pointIsEnclosed(getTestPoint(arcId)); }; // Test if a polygon ring is contained within an indexed ring // Not a true polygon-in-polygon test // Assumes that the target ring does not cross an indexed ring at any point // or share a segment with an indexed ring. (Intersecting rings should have // been detected previously). 
// this.pathIsEnclosed = function(pathIds) { var arcId = pathIds[0]; var p = getTestPoint(arcId); return this.pointIsEnclosed(p); }; // return array of paths that are contained within a path, or null if none // @pathIds Array of arc ids comprising a closed path this.findEnclosedPaths = function(pathIds) { var pathBounds = arcs.getSimpleShapeBounds(pathIds), cands = _index.search(pathBounds.toArray()), paths = [], index; if (cands.length > 6) { index = new PolygonIndex([pathIds], arcs); } cands.forEach(function(cand) { var p = getTestPoint(cand.ids[0]); var isEnclosed = index ? index.pointInPolygon(p[0], p[1]) : pathContainsPoint(pathIds, pathBounds, p); if (isEnclosed) { paths.push(cand.ids); } }); return paths.length > 0 ? paths : null; }; this.findPathsInsideShape = function(shape) { var paths = []; shape.forEach(function(ids) { var enclosed = this.findEnclosedPaths(ids); if (enclosed) { paths = xorArrays(paths, enclosed); } }, this); return paths.length > 0 ? paths : null; }; function testPointInRings(p, cands) { var isOn = false, isIn = false; cands.forEach(function(cand) { var inRing = cand.index ? 
cand.index.pointInPolygon(p[0], p[1]) : pathContainsPoint(cand.ids, cand.bounds, p); if (inRing == -1) { isOn = true; } else if (inRing == 1) { isIn = !isIn; } }); return isOn || isIn; } function findPointHitShapes(p) { var rings = findPointHitRings(p), shapes = [], shape, bbox; if (rings.length > 0) { rings.sort(function(a, b) {return a.id - b.id;}); for (var i=0; i 0) { var p1 = arcs.getVertex(id, -1); str += utils.format("[%f, %f]", p1.x, p1.y); if (len > 1) { var p2 = arcs.getVertex(id, -2); str += utils.format(", [%f, %f]", p2.x, p2.y); if (len > 2) { var p3 = arcs.getVertex(id, 0); str += utils.format(", [%f, %f]", p3.x, p3.y); } str += " len: " + distance2D(p1.x, p1.y, p2.x, p2.y); } } else { str = "[]"; } message(str); } }; this.forEachConnectedArc = function(arcId, cb) { var nextId = nextConnectedArc(arcId), i = 0; while (nextId != arcId) { cb(nextId, i++); nextId = nextConnectedArc(nextId); } }; // Returns the id of the first identical arc or @arcId if none found // TODO: find a better function name this.findMatchingArc = function(arcId) { var verbose = arcId == -12794 || arcId == 19610; var nextId = nextConnectedArc(arcId), match = arcId; while (nextId != arcId) { if (testArcMatch(arcId, nextId)) { if (absArcId(nextId) < absArcId(match)) match = nextId; } nextId = nextConnectedArc(nextId); } if (match != arcId) { trace("found identical arc:", arcId, "->", match); // this.debugNode(arcId); } return match; }; function testArcMatch(a, b) { var absA = a >= 0 ? a : ~a, absB = b >= 0 ? b : ~b, lenA = nn[absA]; if (lenA < 2) { // Don't throw error on collapsed arcs -- assume they will be handled // appropriately downstream. 
// error("[testArcMatch() defective arc; len:", lenA); return false; } if (lenA != nn[absB]) return false; if (testVertexMatch(a, b, -1) && testVertexMatch(a, b, 1) && testVertexMatch(a, b, -2)) { return true; } return false; } function testVertexMatch(a, b, i) { var ai = arcs.indexOfVertex(a, i), bi = arcs.indexOfVertex(b, i); return xx[ai] == xx[bi] && yy[ai] == yy[bi]; } // return arcId of next arc in the chain, pointed towards the shared vertex function nextConnectedArc(arcId) { var fw = arcId >= 0, absId = fw ? arcId : ~arcId, nodeId = fw ? absId * 2 + 1: absId * 2, // if fw, use end, if rev, use start chainedId = nodeData.chains[nodeId], nextAbsId = chainedId >> 1, nextArcId = chainedId & 1 == 1 ? nextAbsId : ~nextAbsId; if (chainedId < 0 || chainedId >= nodeData.chains.length) error("out-of-range chain id"); if (absId >= nn.length) error("out-of-range arc id"); if (nodeData.chains.length <= nodeId) error("out-of-bounds node id"); return nextArcId; } // expose for testing this.internal = { testArcMatch: testArcMatch, testVertexMatch: testVertexMatch }; } MapShaper.findNodeTopology = function(arcs, filter) { var n = arcs.size() * 2, xx2 = new Float64Array(n), yy2 = new Float64Array(n), ids2 = new Int32Array(n); arcs.forEach2(function(i, n, xx, yy, zz, arcId) { if (filter && !filter(arcId)) { return; } var start = i, end = i + n - 1, start2 = arcId * 2, end2 = start2 + 1; xx2[start2] = xx[start]; yy2[start2] = yy[start]; ids2[start2] = arcId; xx2[end2] = xx[end]; yy2[end2] = yy[end]; ids2[end2] = arcId; }); var chains = initPointChains(xx2, yy2); return { xx: xx2, yy: yy2, ids: ids2, chains: chains }; }; // Return function for splitting self-intersecting polygon rings // Returned function receives a single path, returns an array of paths // Assumes that any intersections occur at vertices, not along segments // (requires that MapShaper.divideArcs() has already been run) // MapShaper.getSelfIntersectionSplitter = function(nodes) { function contains(arr, el) { 
for (var i=0, n=arr.length; i -1) { // ignore arcs that are not on this path if (contains(path, exitId)) { // ignore arcs that are not on this path if (count === 0) { firstExitId = exitId; } else if (count === 1) { exitIds = [firstExitId, exitId]; } else { exitIds.push(exitId); } count++; } }); if (exitIds) { subPaths = MapShaper.splitPathByIds(path, exitIds); // recursively divide each sub-path return subPaths.reduce(function(memo, subPath) { return memo.concat(dividePath(subPath)); }, []); } return null; } function dividePath(path) { var subPaths = null; for (var i=0; i 0 ? [path] : []; } return dividePath; }; // @path An array of arc ids // @ids An array of two or more start ids MapShaper.splitPathByIds = function(path, ids) { var n = ids.length; var ii = ids.map(function(id) { var idx = path.indexOf(id); if (idx == -1) error("[splitPathByIds()] Path is missing id:", id); return idx; }); utils.genericSort(ii, true); var subPaths = ii.map(function(idx, i) { var split; if (i == n-1) { // place first path item first split = path.slice(0, ii[0]).concat(path.slice(idx)); } else { split = path.slice(idx, ii[i+1]); } return split; }); // make sure first sub-path starts with arc at path[0] if (ii[0] !== 0) { subPaths.unshift(subPaths.pop()); } if (subPaths[0][0] !== path[0]) { error("[splitPathByIds()] Indexing error"); } return subPaths; }; // Functions for redrawing polygons for clipping / erasing / flattening / division MapShaper.setBits = function(src, flags, mask) { return (src & ~mask) | (flags & mask); }; MapShaper.andBits = function(src, flags, mask) { return src & (~mask | flags); }; MapShaper.setRouteBits = function(bits, id, flags) { var abs = absArcId(id), mask; if (abs == id) { // fw mask = ~3; } else { mask = ~0x30; bits = bits << 4; } flags[abs] &= (bits | mask); }; MapShaper.getRouteBits = function(id, flags) { var abs = absArcId(id), bits = flags[abs]; if (abs != id) bits = bits >> 4; return bits & 7; }; // enable arc pathways in a single shape or array 
of shapes // Uses 8 bits to control traversal of each arc // 0-3: forward arc; 4-7: rev arc // 0: fw path is visible // 1: fw path is open for traversal // ... // MapShaper.openArcRoutes = function(arcIds, arcs, flags, fwd, rev, dissolve, orBits) { MapShaper.forEachArcId(arcIds, function(id) { var isInv = id < 0, absId = isInv ? ~id : id, currFlag = flags[absId], openFwd = isInv ? rev : fwd, openRev = isInv ? fwd : rev, newFlag = currFlag; // error condition: lollipop arcs can cause problems; ignore these if (arcs.arcIsLollipop(id)) { trace('lollipop'); newFlag = 0; // unset (i.e. make invisible) } else { if (openFwd) { newFlag |= 3; // visible / open } if (openRev) { newFlag |= 0x30; // visible / open } // placing this in front of dissolve - dissolve has to be able to hide // arcs that are set to visible if (orBits > 0) { newFlag |= orBits; } // dissolve hides arcs that have both fw and rev pathways open if (dissolve && (newFlag & 0x22) === 0x22) { newFlag &= ~0x11; // make invisible } } flags[absId] = newFlag; }); }; MapShaper.closeArcRoutes = function(arcIds, arcs, flags, fwd, rev, hide) { MapShaper.forEachArcId(arcIds, function(id) { var isInv = id < 0, absId = isInv ? ~id : id, currFlag = flags[absId], mask = 0xff, closeFwd = isInv ? rev : fwd, closeRev = isInv ? fwd : rev; if (closeFwd) { if (hide) mask &= ~1; mask ^= 0x2; } if (closeRev) { if (hide) mask &= ~0x10; mask ^= 0x20; } flags[absId] = currFlag & mask; }); }; // Return a function for generating a path across a field of intersecting arcs // TODO: add option to calculate angle on sphere for lat-lng coords // MapShaper.getPathFinder = function(nodes, useRoute, routeIsVisible, chooseRoute, spherical) { var arcs = nodes.arcs, coords = arcs.getVertexData(), xx = coords.xx, yy = coords.yy, calcAngle = spherical ? 
geom.signedAngleSph : geom.signedAngle; function getNextArc(prevId) { var ai = arcs.indexOfVertex(prevId, -2), ax = xx[ai], ay = yy[ai], bi = arcs.indexOfVertex(prevId, -1), bx = xx[bi], by = yy[bi], nextId = NaN, nextAngle = 0; nodes.forEachConnectedArc(prevId, function(candId) { if (!routeIsVisible(~candId)) return; if (arcs.getArcLength(candId) < 2) error("[pathfinder] defective arc"); var ci = arcs.indexOfVertex(candId, -2), cx = xx[ci], cy = yy[ci], // sanity check: make sure both arcs share the same vertex; di = arcs.indexOfVertex(candId, -1), dx = xx[di], dy = yy[di], candAngle; if (dx !== bx || dy !== by) { message("cd:", cx, cy, dx, dy, 'arc:', candId); error("Error in node topology"); } candAngle = calcAngle(ax, ay, bx, by, cx, cy); if (candAngle > 0) { if (nextAngle === 0) { nextId = candId; nextAngle = candAngle; } else { var choice = chooseRoute(~nextId, nextAngle, ~candId, candAngle, prevId); if (choice == 2) { nextId = candId; nextAngle = candAngle; } } } else { // candAngle is NaN or 0 trace("#getNextArc() Invalid angle; id:", candId, "angle:", candAngle); nodes.debugNode(prevId); } }); if (nextId === prevId) { // TODO: confirm that this can't happen nodes.debugNode(prevId); error("#getNextArc() nextId === prevId"); } return ~nextId; // reverse arc to point onwards } return function(startId) { var path = [], nextId, msg, candId = startId, verbose = false; do { if (verbose) msg = (nextId === undefined ? " " : " " + nextId) + " -> " + candId; if (useRoute(candId)) { path.push(candId); nextId = candId; if (verbose) message(msg); candId = getNextArc(nextId); if (verbose && candId == startId ) message(" o", geom.getPlanarPathArea(path, arcs)); } else { if (verbose) message(msg + " x"); return null; } if (candId == ~nextId) { trace("dead-end"); // TODO: handle or prevent this error condition return null; } } while (candId != startId); return path.length === 0 ? 
null : path; }; }; // types: "dissolve" "flatten" // Returns a function for flattening or dissolving a collection of rings // Assumes rings are oriented in CW direction // MapShaper.getRingIntersector = function(nodes, type, flags, spherical) { var arcs = nodes.arcs; var findPath = MapShaper.getPathFinder(nodes, useRoute, routeIsActive, chooseRoute, spherical); flags = flags || new Uint8Array(arcs.size()); return function(rings) { var dissolve = type == 'dissolve', openFwd = true, openRev = type == 'flatten', output; // even single rings get transformed (e.g. to remove spikes) if (rings.length > 0) { output = []; MapShaper.openArcRoutes(rings, arcs, flags, openFwd, openRev, dissolve); MapShaper.forEachPath(rings, function(ids) { var path; for (var i=0, n=ids.length; i 0 ? "1" : "0"; if (i < 7) str += ' '; if (i == 3) str += ' '; } return str; } }; /* // Print info about two arcs whose first segments are parallel // MapShaper.debugRoute = function(id1, id2, arcs) { var n1 = arcs.getArcLength(id1), n2 = arcs.getArcLength(id2), len1 = 0, len2 = 0, p1, p2, pp1, pp2, ppp1, ppp2, angle1, angle2; console.log("chooseRoute() lengths:", n1, n2, 'ids:', id1, id2); for (var i=0; i 1) { angle1 = signedAngle(ppp1.x, ppp1.y, pp1.x, pp1.y, p1.x, p1.y); angle2 = signedAngle(ppp2.x, ppp2.y, pp2.x, pp2.y, p2.x, p2.y); console.log("angles:", angle1, angle2, 'lens:', len1, len2); // return; } if (i >= 1) { len1 += distance2D(p1.x, p1.y, pp1.x, pp1.y); len2 += distance2D(p2.x, p2.y, pp2.x, pp2.y); } if (i == 1 && (n1 == 2 || n2 == 2)) { console.log("arc1:", pp1, p1, "len:", len1); console.log("arc2:", pp2, p2, "len:", len2); } ppp1 = pp1; ppp2 = pp2; pp1 = p1; pp2 = p2; } return 1; }; */ // Returns a function that separates rings in a polygon into space-enclosing rings // and holes. Also fixes self-intersections. 
// MapShaper.getHoleDivider = function(nodes, spherical) { var split = MapShaper.getSelfIntersectionSplitter(nodes); return function(rings, cw, ccw) { var pathArea = spherical ? geom.getSphericalPathArea : geom.getPlanarPathArea; MapShaper.forEachPath(rings, function(ringIds) { var splitRings = split(ringIds); if (splitRings.length === 0) { trace("[getRingDivider()] Defective path:", ringIds); } splitRings.forEach(function(ringIds, i) { var ringArea = pathArea(ringIds, nodes.arcs); if (ringArea > 0) { cw.push(ringIds); } else if (ringArea < 0) { ccw.push(ringIds); } }); }); }; }; // clean polygon or polyline shapes, in-place // MapShaper.cleanShapes = function(shapes, arcs, type) { for (var i=0, n=shapes.length; i 0 ? path.filter(function(id) {return id !== null;}) : path; }; // Remove pairs of ids where id[n] == ~id[n+1] or id[0] == ~id[n-1]; // (in place) MapShaper.removeSpikesInPath = function(ids) { var n = ids.length; if (n >= 2) { if (ids[0] == ~ids[n-1]) { ids.pop(); ids.shift(); } else { for (var i=1; i 0 ? 1 : -1, mainRing; var maxArea = splitIds.reduce(function(max, ringIds, i) { var pathArea = geom.getPlanarPathArea(ringIds, nodes.arcs) * sign; if (pathArea > max) { mainRing = ringIds; max = pathArea; } return max; }, 0); if (mainRing) { cleanedPolygon.push(mainRing); } } }); return cleanedPolygon.length > 0 ? cleanedPolygon : null; } }; // Functions for dividing polygons and polygons at points where arc-segments intersect // Divide a collection of arcs at points where segments intersect // and re-index the paths of all the layers that reference the arc collection. 
// (in-place) MapShaper.divideArcs = function(dataset) { var arcs = dataset.arcs; T.start(); T.start(); var snapDist = MapShaper.getHighPrecisionSnapInterval(arcs); var snapCount = MapShaper.snapCoordsByInterval(arcs, snapDist); var dupeCount = arcs.dedupCoords(); T.stop('snap points'); if (snapCount > 0 || dupeCount > 0) { T.start(); // Detect topology again if coordinates have changed api.buildTopology(dataset); T.stop('rebuild topology'); } // clip arcs at points where segments intersect T.start(); var map = MapShaper.insertClippingPoints(arcs); T.stop('insert clipping points'); T.start(); // update arc ids in arc-based layers and clean up arc geometry // to remove degenerate arcs and duplicate points var nodes = new NodeCollection(arcs); dataset.layers.forEach(function(lyr) { if (MapShaper.layerHasPaths(lyr)) { MapShaper.updateArcIds(lyr.shapes, map, nodes); // TODO: consider alternative -- avoid creating degenerate arcs // in insertClippingPoints() MapShaper.cleanShapes(lyr.shapes, arcs, lyr.geometry_type); } }); T.stop('update arc ids / clean geometry'); T.stop("divide arcs"); return nodes; }; MapShaper.updateArcIds = function(shapes, map, nodes) { var arcCount = nodes.arcs.size(), shape2; for (var i=0; i= map.length - 1 ? 
arcCount : map[absId + 1]) - 1, id2; do { if (rev) { id2 = ~max; max--; } else { id2 = min; min++; } // If there are duplicate arcs, always use the same one if (nodes) { id2 = nodes.findMatchingArc(id2); } ids.push(id2); } while (max - min >= 0); } }; // divide a collection of arcs at points where line segments cross each other // @arcs ArcCollection // returns array that maps original arc ids to new arc ids MapShaper.insertClippingPoints = function(arcs) { var points = MapShaper.findClippingPoints(arcs), p; // TODO: avoid some or all of the following if no points need to be added // original arc data var pointTotal0 = arcs.getPointCount(), arcTotal0 = arcs.size(), data = arcs.getVertexData(), xx0 = data.xx, yy0 = data.yy, nn0 = data.nn, i0 = 0, n0, arcLen0; // new arc data var pointTotal1 = pointTotal0 + points.length * 2, xx1 = new Float64Array(pointTotal1), yy1 = new Float64Array(pointTotal1), nn1 = [], // number of arcs may vary i1 = 0, n1; var map = new Uint32Array(arcTotal0); // sort from last point to first point points.sort(function(a, b) { return b.i - a.i || b.pct - a.pct; }); p = points.pop(); for (var id0=0, id1=0; id0 < arcTotal0; id0++) { arcLen0 = nn0[id0]; map[id0] = id1; n0 = 0; n1 = 0; while (n0 < arcLen0) { n1++; xx1[i1] = xx0[i0]; yy1[i1++] = yy0[i0]; while (p && p.i === i0) { xx1[i1] = p.x; yy1[i1++] = p.y; n1++; nn1[id1++] = n1; // end current arc at intersection n1 = 0; // begin new arc xx1[i1] = p.x; yy1[i1++] = p.y; n1++; p = points.pop(); } n0++; i0++; } nn1[id1++] = n1; } if (i1 != pointTotal1) error("[insertClippingPoints()] Counting error"); arcs.updateVertexData(nn1, xx1, yy1, null); // segment-point intersections create duplicate points // TODO: consider removing call to dedupCoords() -- empty arcs are removed by cleanShapes() arcs.dedupCoords(); return map; }; MapShaper.findClippingPoints = function(arcs) { var intersections = MapShaper.findSegmentIntersections(arcs), data = arcs.getVertexData(), xx = data.xx, yy = data.yy, points = 
[]; intersections.forEach(function(o) { var p1 = getSegmentIntersection(o.x, o.y, o.a), p2 = getSegmentIntersection(o.x, o.y, o.b); if (p1) points.push(p1); if (p2) points.push(p2); }); // remove 1. points that are at arc endpoints and 2. duplicate points // (kludgy -- look into preventing these cases, which are caused by T intersections) var index = {}; return points.filter(function(p) { var key = p.i + "," + p.pct; if (key in index) return false; index[key] = true; if (p.pct <= 0 && arcs.pointIsEndpoint(p.i) || p.pct >= 1 && arcs.pointIsEndpoint(p.j)) { return false; } return true; }); function getSegmentIntersection(x, y, ids) { var i = ids[0], j = ids[1], dx = xx[j] - xx[i], dy = yy[j] - yy[i], pct; if (i > j) error("[findClippingPoints()] Out-of-sequence arc ids"); if (dx === 0 && dy === 0) { pct = 0; } else if (Math.abs(dy) > Math.abs(dx)) { pct = (y - yy[i]) / dy; } else { pct = (x - xx[i]) / dx; } if (pct < 0 || pct > 1) { verbose("[findClippingPoints()] Off-segment intersection (caused by rounding error"); trace("pct:", pct, "dx:", dx, "dy:", dy, 'x:', x, 'y:', y, 'xx[i]:', xx[i], 'xx[j]:', xx[j], 'yy[i]:', yy[i], 'yy[j]:', yy[j]); trace("xpct:", (x - xx[i]) / dx, 'ypct:', (y - yy[i]) / dy); if (pct < 0) pct = 0; if (pct > 1) pct = 1; } return { pct: pct, i: i, j: j, x: x, y: y }; } }; // List of encodings supported by iconv-lite: // https://github.com/ashtuchkin/iconv-lite/wiki/Supported-Encodings // Return list of supported encodings MapShaper.getEncodings = function() { var iconv = require('iconv-lite'); iconv.encodingExists('ascii'); // make iconv load its encodings return Object.keys(iconv.encodings); }; MapShaper.validateEncoding = function(enc) { if (!MapShaper.encodingIsSupported(enc)) { stop("Unknown encoding:", enc, "\nRun the -encodings command see a list of supported encodings"); } return enc; }; MapShaper.encodingIsSupported = function(raw) { var enc = MapShaper.standardizeEncodingName(raw); return utils.contains(MapShaper.getEncodings(), 
enc); };
// Decode a buffer of text using the named encoding; strips a leading BOM.
// @buf a Node Buffer
MapShaper.decodeString = function(buf, encoding) {
  var iconv = require('iconv-lite'),
      str = iconv.decode(buf, encoding);
  // remove BOM if present
  if (str.charCodeAt(0) == 0xfeff) {
    str = str.substr(1);
  }
  return str;
};
// Normalize an encoding name: lowercase, strip '-' and '_'
// Ex. convert UTF-8 to utf8
MapShaper.standardizeEncodingName = function(enc) {
  return enc.toLowerCase().replace(/[_-]/g, '');
};
// Print a sorted grid of supported encoding names via message()
MapShaper.printEncodings = function() {
  var encodings = MapShaper.getEncodings().filter(function(name) {
    // filter out some aliases and non-applicable encodings
    return !/^(_|cs|internal|ibm|isoir|singlebyte|table|[0-9]|l[0-9]|windows)/.test(name);
  });
  encodings.sort();
  message("Supported encodings:");
  message(MapShaper.formatStringsAsGrid(encodings));
};
// Try to detect the encoding of some sample text.
// Returns an encoding name or null.
// @samples Array of buffers containing sample text fields
// TODO: Improve reliability and number of detectable encodings.
MapShaper.detectEncoding = function(samples) {
  var encoding = null;
  if (MapShaper.looksLikeUtf8(samples)) {
    encoding = 'utf8';
  } else if (MapShaper.looksLikeWin1252(samples)) {
    // Win1252 is the same as Latin1, except it replaces a block of control
    // characters with n-dash, Euro and other glyphs. Encountered in-the-wild
    // in Natural Earth (airports.dbf uses n-dash).
    encoding = 'win1252';
  }
  return encoding;
};
// Convert an array of text samples to a single string using a given encoding
MapShaper.decodeSamples = function(enc, samples) {
  return samples.map(function(buf) {
    return MapShaper.decodeString(buf, enc).trim();
  }).join('\n');
};
MapShaper.formatSamples = function(str) {
  return MapShaper.formatStringsAsGrid(str.split('\n'));
};
// Quick-and-dirty win1252 detection: decoded string contains mostly common ascii
// chars and almost no chars other than word chars + punctuation.
// This excludes encodings like Greek, Cyrillic or Thai, but
// is susceptible to false positives with encodings like codepage 1250 ("Eastern
// European").
MapShaper.looksLikeWin1252 = function(samples) { var ascii = 'abcdefghijklmnopqrstuvwxyz0123456789.\'"?+-\n,:;/|_$% ', //common l.c. ascii chars extended = 'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýÿ°–', // common extended str = MapShaper.decodeSamples('win1252', samples), asciiScore = MapShaper.getCharScore(str, ascii), totalScore = MapShaper.getCharScore(str, extended + ascii); return totalScore > 0.97 && asciiScore > 0.7; }; // Accept string if it doesn't contain the "replacement character" MapShaper.looksLikeUtf8 = function(samples) { var str = MapShaper.decodeSamples('utf8', samples); return str.indexOf('\ufffd') == -1; }; // Calc percentage of chars in a string that are present in a second string // @chars String of chars to look for in @str MapShaper.getCharScore = function(str, chars) { var index = {}, count = 0, score; str = str.toLowerCase(); for (var i=0, n=chars.length; i 0 || c != 32) { // ignore leading spaces (e.g. DBF numbers) buf[count++] = c; } } // ignore trailing spaces (DBF string fields are typically r-padded w/ spaces) while (count > 0 && buf[count-1] == 32) { count--; } return count; }; Dbf.getAsciiStringReader = function() { var buf = new Uint8Array(256); // new Buffer(256); return function readAsciiString(bin, size) { var str = '', n = Dbf.readStringBytes(bin, size, buf); for (var i=0; i= 128) return true; } return false; }; Dbf.getNumberReader = function() { var read = Dbf.getAsciiStringReader(); return function readNumber(bin, size) { var str = read(bin, size); var val; if (str.indexOf(',') >= 0) { str = str.replace(',', '.'); // handle comma decimal separator } val = parseFloat(str); return isNaN(val) ? 
null : val; }; }; Dbf.readInt = function(bin, size) { return bin.readInt32(); }; Dbf.readBool = function(bin, size) { var c = bin.readCString(size), val = null; if (/[ty]/i.test(c)) val = true; else if (/[fn]/i.test(c)) val = false; return val; }; Dbf.readDate = function(bin, size) { var str = bin.readCString(size), yr = str.substr(0, 4), mo = str.substr(4, 2), day = str.substr(6, 2); return new Date(Date.UTC(+yr, +mo - 1, +day)); }; // cf. http://code.google.com/p/stringencoding/ // // @src is a Buffer or ArrayBuffer or filename // function DbfReader(src, encodingArg) { if (utils.isString(src)) { error("[DbfReader] Expected a buffer, not a string"); } var bin = new BinArray(src); var header = readHeader(bin); var encoding = encodingArg || null; this.size = function() {return header.recordCount;}; this.readRow = function(i) { // create record reader on-the-fly // (delays encoding detection until we need to read data) return getRecordReader(header.fields)(i); }; this.getFields = getFieldNames; this.getBuffer = function() {return bin.buffer();}; this.deleteField = function(f) { header.fields = header.fields.filter(function(field) { return field.name != f; }); }; this.readRows = function() { var reader = getRecordReader(header.fields); var data = []; for (var r=0, n=this.size(); r eofOffs) { stop('[dbf] Invalid DBF file: encountered end-of-file while reading data'); } bin.position(fieldOffs); values[c] = readers[c](bin, field.size); } return create.apply(null, values); }; } // @f Field metadata from dbf header function getFieldReader(f) { var type = f.type, r = null; if (type == 'I') { r = Dbf.readInt; } else if (type == 'F' || type == 'N') { r = Dbf.getNumberReader(); } else if (type == 'L') { r = Dbf.readBool; } else if (type == 'D') { r = Dbf.readDate; } else if (type == 'C') { r = Dbf.getStringReader(getEncoding()); } else { message("[dbf] Field \"" + field.name + "\" has an unsupported type (" + field.type + ") -- converting to null values"); r = function() 
{return null;}; } return r; } function findStringEncoding() { var ldid = header.ldid, codepage = Dbf.lookupCodePage(ldid), samples = getNonAsciiSamples(50), only7bit = samples.length === 0, encoding, msg; // First, check the ldid (language driver id) (an obsolete way to specify which // codepage to use for text encoding.) // ArcGIS up to v.10.1 sets ldid and encoding based on the 'locale' of the // user's Windows system :P // if (codepage && ldid != 87) { // if 8-bit data is found and codepage is detected, use the codepage, // except ldid 87, which some GIS software uses regardless of encoding. encoding = codepage; } else if (only7bit) { // Text with no 8-bit chars should be compatible with 7-bit ascii // (Most encodings are supersets of ascii) encoding = 'ascii'; } // As a last resort, try to guess the encoding: if (!encoding) { encoding = MapShaper.detectEncoding(samples); } // Show a sample of decoded text if non-ascii-range text has been found if (encoding && samples.length > 0) { msg = "Detected DBF text encoding: " + encoding; if (encoding in Dbf.encodingNames) { msg += " (" + Dbf.encodingNames[encoding] + ")"; } message(msg); msg = MapShaper.decodeSamples(encoding, samples); msg = MapShaper.formatStringsAsGrid(msg.split('\n')); message("Sample text containing non-ascii characters:" + (msg.length > 60 ? '\n' : '') + msg); } return encoding; } // Return up to @size buffers containing text samples // with at least one byte outside the 7-bit ascii range. 
function getNonAsciiSamples(size) { var samples = []; var stringFields = header.fields.filter(function(f) { return f.type == 'C'; }); var buf = new Buffer(256); var index = {}; var f, chars, sample, hash; for (var r=0, rows=header.recordCount; r= size) break; f = stringFields[c]; bin.position(getRowOffset(r) + f.columnOffset); chars = Dbf.readStringBytes(bin, f.size, buf); if (chars > 0 && Dbf.bufferContainsHighBit(buf, chars)) { sample = new Buffer(buf.slice(0, chars)); // hash = sample.toString('hex'); if (hash in index === false) { // avoid duplicate samples index[hash] = true; samples.push(sample); } } } } return samples; } } Dbf.MAX_STRING_LEN = 254; Dbf.exportRecords = function(arr, encoding) { encoding = encoding || 'ascii'; var fields = Dbf.getFieldNames(arr); var uniqFields = MapShaper.getUniqFieldNames(fields, 10); var rows = arr.length; var fieldData = fields.map(function(name) { return Dbf.getFieldInfo(arr, name, encoding); }); var headerBytes = Dbf.getHeaderSize(fieldData.length), recordBytes = Dbf.getRecordSize(utils.pluck(fieldData, 'size')), fileBytes = headerBytes + rows * recordBytes + 1; var buffer = new ArrayBuffer(fileBytes); var bin = new BinArray(buffer).littleEndian(); var now = new Date(); // write header bin.writeUint8(3); bin.writeUint8(now.getFullYear() - 1900); bin.writeUint8(now.getMonth() + 1); bin.writeUint8(now.getDate()); bin.writeUint32(rows); bin.writeUint16(headerBytes); bin.writeUint16(recordBytes); bin.skipBytes(17); bin.writeUint8(0); // language flag; TODO: improve this bin.skipBytes(2); // field subrecords fieldData.reduce(function(recordOffset, obj, i) { var fieldName = uniqFields[i]; bin.writeCString(fieldName, 11); bin.writeUint8(obj.type.charCodeAt(0)); bin.writeUint32(recordOffset); bin.writeUint8(obj.size); bin.writeUint8(obj.decimals); bin.skipBytes(14); return recordOffset + obj.size; }, 1); bin.writeUint8(0x0d); // "field descriptor terminator" if (bin.position() != headerBytes) { error("Dbf#exportRecords() header 
size mismatch; expected:", headerBytes, "written:", bin.position()); } arr.forEach(function(rec, i) { var start = bin.position(); bin.writeUint8(0x20); // delete flag; 0x20 valid 0x2a deleted for (var j=0, n=fieldData.length; j MAX_FIELD_SIZE) { size = MAX_FIELD_SIZE; info.decimals -= size - MAX_FIELD_SIZE; if (info.decimals < 0) { error ("Dbf#getFieldInfo() Out-of-range error."); } } info.size = size; var formatter = Dbf.getDecimalFormatter(size, info.decimals); info.write = function(i, bin) { var rec = arr[i], str = formatter(rec[name]); if (str.length < size) { str = utils.lpad(str, size, ' '); } bin.writeString(str, size); }; }; Dbf.initBooleanField = function(info, arr, name) { info.size = 1; info.write = function(i, bin) { var val = arr[i][name], c; if (val === true) c = 'T'; else if (val === false) c = 'F'; else c = '?'; bin.writeString(c); }; }; Dbf.initDateField = function(info, arr, name) { info.size = 8; info.write = function(i, bin) { var d = arr[i][name], str; if (d instanceof Date === false) { str = '00000000'; } else { str = utils.lpad(d.getUTCFullYear(), 4, '0') + utils.lpad(d.getUTCMonth() + 1, 2, '0') + utils.lpad(d.getUTCDate(), 2, '0'); } bin.writeString(str); }; }; Dbf.initStringField = function(info, arr, name, encoding) { var formatter = Dbf.getStringWriter(encoding); var size = 0; var values = arr.map(function(rec) { var buf = formatter(rec[name]); size = Math.max(size, buf.byteLength); return buf; }); info.size = size; info.write = function(i, bin) { var buf = values[i], bytes = Math.min(size, buf.byteLength), idx = bin.position(); bin.writeBuffer(buf, bytes, 0); bin.position(idx + size); }; }; Dbf.getFieldInfo = function(arr, name, encoding) { var type = this.discoverFieldType(arr, name), info = { name: name, type: type, decimals: 0 }; if (type == 'N') { Dbf.initNumericField(info, arr, name); } else if (type == 'C') { Dbf.initStringField(info, arr, name, encoding); } else if (type == 'L') { Dbf.initBooleanField(info, arr, name); } else if 
(type == 'D') { Dbf.initDateField(info, arr, name); } else { // Treat null fields as empty numeric fields; this way, they will be imported // again as nulls. info.size = 0; info.type = 'N'; info.write = function() {}; } return info; }; Dbf.discoverFieldType = function(arr, name) { var val; for (var i=0, n=arr.length; i max) max = val; while (val * k % 1 !== 0) { if (decimals == limit) { // TODO: verify limit, remove oflo message, round overflowing values // trace ("#getNumericFieldInfo() Number field overflow; value:", val); break; } decimals++; k *= 10; } if (decimals > maxDecimals) maxDecimals = decimals; } return { decimals: maxDecimals, min: min, max: max }; }; // Return function to convert a JS str to an ArrayBuffer containing encoded str. Dbf.getStringWriter = function(encoding) { if (encoding === 'ascii') { return Dbf.getStringWriterAscii(); } else { return Dbf.getStringWriterEncoded(encoding); } }; // TODO: handle non-ascii chars. Option: switch to // utf8 encoding if non-ascii chars are found. Dbf.getStringWriterAscii = function() { return function(val) { var str = String(val), n = Math.min(str.length, Dbf.MAX_STRING_LEN), dest = new ArrayBuffer(n), view = new Uint8ClampedArray(dest); for (var i=0; i= Dbf.MAX_STRING_LEN) { buf = Dbf.truncateEncodedString(buf, encoding, Dbf.MAX_STRING_LEN); } return BinArray.toArrayBuffer(buf); }; }; // try to remove partial multi-byte characters from the end of an encoded string. Dbf.truncateEncodedString = function(buf, encoding, maxLen) { var truncated = buf.slice(0, maxLen); var len = maxLen; var tmp, str; while (len > 0 && len >= maxLen - 3) { tmp = len == maxLen ? 
truncated : buf.slice(0, len); str = MapShaper.decodeString(tmp, encoding); if (str.charAt(str.length-1) != '\ufffd') { truncated = tmp; break; } len--; } return truncated; }; var dataFieldRxp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; function DataTable(obj) { var records; if (utils.isArray(obj)) { records = obj; } else { records = []; // integer object: create empty records if (utils.isInteger(obj)) { for (var i=0; i 0) { xx.pop(); yy.pop(); count--; } if (count > 0) { shp2.push([nn.length]); nn.push(count); } }); return { shape: shp2.length > 0 ? shp2 : null, arcs: new ArcCollection(nn, xx, yy) }; }; MapShaper.simplifyPathFast = function(path, arcs, dist, xx, yy) { var iter = arcs.getShapeIter(path), count = 0, prevX, prevY, x, y; while (iter.hasNext()) { x = iter.x; y = iter.y; if (count === 0 || distance2D(x, y, prevX, prevY) > dist) { xx.push(x); yy.push(y); prevX = x; prevY = y; count++; } } if (x != prevX || y != prevY) { xx.push(x); yy.push(y); count++; } return count; }; // Get the centroid of the largest ring of a polygon // TODO: Include holes in the calculation // TODO: Add option to find centroid of all rings, not just the largest geom.getShapeCentroid = function(shp, arcs) { var maxPath = geom.getMaxPath(shp, arcs); return maxPath ? 
geom.getPathCentroid(maxPath, arcs) : null; }; geom.getPathCentroid = function(ids, arcs) { var iter = arcs.getShapeIter(ids), sum = 0, sumX = 0, sumY = 0, ax, ay, tmp, area; if (!iter.hasNext()) return null; ax = iter.x; ay = iter.y; while (iter.hasNext()) { tmp = ax * iter.y - ay * iter.x; sum += tmp; sumX += tmp * (iter.x + ax); sumY += tmp * (iter.y + ay); ax = iter.x; ay = iter.y; } area = sum / 2; if (area === 0) { return geom.getAvgPathXY(ids, arcs); } else return { x: sumX / (6 * area), y: sumY / (6 * area) }; }; // Find a point inside a polygon and located away from the polygon edge // Method: // - get the largest ring of the polygon // - get an array of x-values distributed along the horizontal extent of the ring // - for each x: // intersect a vertical line with the polygon at x // find midpoints of each intersecting segment // - for each midpoint: // adjust point vertically to maximize weighted distance from polygon edge // - return the adjusted point having the maximum weighted distance from the edge // // (distance is weighted to slightly favor points near centroid) // geom.findInteriorPoint = function(shp, arcs) { var maxPath = shp && geom.getMaxPath(shp, arcs), pathBounds = maxPath && arcs.getSimpleShapeBounds(maxPath), thresh, simple; if (!pathBounds || !pathBounds.hasBounds() || pathBounds.area() === 0) { return null; } thresh = Math.sqrt(pathBounds.area()) * 0.01; simple = MapShaper.simplifyPolygonFast(shp, arcs, thresh); if (!simple.shape) { return null; // collapsed shape } return geom.findInteriorPoint2(simple.shape, simple.arcs); }; // Assumes: shp is a polygon with at least one space-enclosing ring geom.findInteriorPoint2 = function(shp, arcs) { var maxPath = geom.getMaxPath(shp, arcs); var pathBounds = arcs.getSimpleShapeBounds(maxPath); var centroid = geom.getPathCentroid(maxPath, arcs); var weight = MapShaper.getPointWeightingFunction(centroid, pathBounds); var area = geom.getPlanarPathArea(maxPath, arcs); var hrange, lbound, rbound, 
focus, htics, hstep, p, p2; // Limit test area if shape is simple and squarish if (shp.length == 1 && area * 1.2 > pathBounds.area()) { htics = 5; focus = 0.2; } else if (shp.length == 1 && area * 1.7 > pathBounds.area()) { htics = 7; focus = 0.4; } else { htics = 11; focus = 0.5; } hrange = pathBounds.width() * focus; lbound = centroid.x - hrange / 2; rbound = lbound + hrange; hstep = hrange / htics; // Find a best-fit point p = MapShaper.probeForBestInteriorPoint(shp, arcs, lbound, rbound, htics, weight); if (!p) { verbose("[points inner] failed, falling back to centroid"); p = centroid; } else { // Look for even better fit close to best-fit point p2 = MapShaper.probeForBestInteriorPoint(shp, arcs, p.x - hstep / 2, p.x + hstep / 2, 2, weight); if (p2.distance > p.distance) { p = p2; } } return p; }; MapShaper.getPointWeightingFunction = function(centroid, pathBounds) { // Get a factor for weighting a candidate point // Points closer to the centroid are slightly preferred var referenceDist = Math.max(pathBounds.width(), pathBounds.height()) / 2; return function(x, y) { var offset = distance2D(centroid.x, centroid.y, x, y); return 1 - Math.min(0.6 * offset / referenceDist, 0.25); }; }; MapShaper.findInteriorPointCandidates = function(shp, arcs, xx) { var ymin = arcs.getBounds().ymin - 1; return xx.reduce(function(memo, x) { var cands = MapShaper.findHitCandidates(x, ymin, shp, arcs); return memo.concat(cands); }, []); }; MapShaper.probeForBestInteriorPoint = function(shp, arcs, lbound, rbound, htics, weight) { var tics = MapShaper.getInnerTics(lbound, rbound, htics); var interval = (rbound - lbound) / htics; // Get candidate points, distributed along x-axis var candidates = MapShaper.findInteriorPointCandidates(shp, arcs, tics); var bestP, adjustedP, candP; // Sort candidates so points at the center of longer segments are tried first candidates.forEach(function(p) { p.interval *= weight(p.x, p.y); }); candidates.sort(function(a, b) { return b.interval - a.interval; 
}); for (var i=0; i candP.interval) { break; } adjustedP = MapShaper.getAdjustedPoint(candP.x, candP.y, shp, arcs, interval, weight); if (!bestP || adjustedP.distance > bestP.distance) { bestP = adjustedP; } } return bestP; }; // [x, y] is a point assumed to be inside a polygon @shp // Try to move the point farther from the polygon edge MapShaper.getAdjustedPoint = function(x, y, shp, arcs, vstep, weight) { var p = { x: x, y: y, distance: geom.getPointToShapeDistance(x, y, shp, arcs) * weight(x, y) }; MapShaper.scanForBetterPoint(p, shp, arcs, vstep, weight); // scan up MapShaper.scanForBetterPoint(p, shp, arcs, -vstep, weight); // scan down return p; }; // Try to find a better-fit point than @p by scanning vertically // Modify p in-place MapShaper.scanForBetterPoint = function(p, shp, arcs, vstep, weight) { var x = p.x, y = p.y, dmax = p.distance, d; while (true) { y += vstep; d = geom.getPointToShapeDistance(x, y, shp, arcs) * weight(x, y); // overcome vary small local minima if (d > dmax * 0.90 && geom.testPointInPolygon(x, y, shp, arcs)) { if (d > dmax) { p.distance = dmax = d; p.y = y; } } else { break; } } }; // Return array of points at the midpoint of each line segment formed by the // intersection of a vertical ray at [x, y] and a polygon shape MapShaper.findHitCandidates = function(x, y, shp, arcs) { var yy = MapShaper.findRayShapeIntersections(x, y, shp, arcs); var cands = [], y1, y2, interval; // sorting by y-coord organizes y-intercepts into interior segments utils.genericSort(yy); for (var i=0; i 0) { cands.push({ y: (y1 + y2) / 2, x: x, interval: interval }); } } return cands; }; // Return array of y-intersections between vertical ray with origin at [x, y] // and a polygon MapShaper.findRayShapeIntersections = function(x, y, shp, arcs) { if (!shp) return []; return shp.reduce(function(memo, path) { var yy = MapShaper.findRayRingIntersections(x, y, path, arcs); return memo.concat(yy); }, []); }; // Return array of y-intersections between vertical ray 
and a polygon ring MapShaper.findRayRingIntersections = function(x, y, path, arcs) { var yints = []; MapShaper.forEachPathSegment(path, arcs, function(a, b, xx, yy) { var result = geom.getRayIntersection(x, y, xx[a], yy[a], xx[b], yy[b]); if (result > -Infinity) { yints.push(result); } }); // Ignore odd number of intersections -- probably caused by a ray that touches // but doesn't cross the ring // TODO: improve method to handle edge case with two touches and no crosses. if (yints.length % 2 === 1) { yints = []; } return yints; }; // TODO: find better home + name for this MapShaper.getInnerTics = function(min, max, steps) { var range = max - min, step = range / (steps + 1), arr = []; for (var i = 1; i<=steps; i++) { arr.push(min + step * i); } return arr; }; // Compiled expression returns a value MapShaper.compileValueExpression = function(exp, lyr, arcs) { return MapShaper.compileFeatureExpression(exp, lyr, arcs, true); }; MapShaper.compileFeatureExpression = function(rawExp, lyr, arcs, returns) { var exp = rawExp || '', vars = MapShaper.getAssignedVars(exp), func, records; if (vars.length > 0 && !lyr.data) { MapShaper.initDataTable(lyr); } records = lyr.data ? 
lyr.data.getRecords() : []; func = MapShaper.getExpressionFunction(exp, lyr, arcs, returns); return function(recId) { var record = records[recId]; if (!record) { record = records[recId] = {}; } // initialize new fields to null so assignments work for (var i=0; i 0) { if (!memo) { memo = {sum: weight, centroid: p.concat()}; } else { sum = memo.sum + weight; k = memo.sum / sum; memo.centroid[0] = k * memo.centroid[0] + weight * x / sum; memo.centroid[1] = k * memo.centroid[1] + weight * y / sum; if (p.length == 3) { memo.centroid[2] = k * memo.centroid[2] + weight * p[2] / sum; } memo.sum = sum; } } return memo; } function dissolvePolygonGeometry(shapes, getGroupId) { var segments = dissolveFirstPass(shapes, getGroupId); return dissolveSecondPass(segments, shapes, getGroupId); } // First pass -- identify pairs of segments that can be dissolved function dissolveFirstPass(shapes, getGroupId) { var groups = [], largeGroups = [], segments = [], ids = shapes.map(function(shp, i) { return getGroupId(i); }); MapShaper.traversePaths(shapes, procArc); largeGroups.forEach(splitGroup); return segments; function procArc(obj) { var arcId = obj.arcId, idx = arcId < 0 ? ~arcId : arcId, segId = segments.length, group = groups[idx]; if (!group) { group = []; groups[idx] = group; } group.push(segId); obj.group = group; segments.push(obj); // Three or more segments sharing the same arc is abnormal topology... // Need to try to identify pairs of matching segments in each of these // groups. 
// if (group.length == 3) { largeGroups.push(group); } } function findMatchingPair(group, cb) { var arc1, arc2; for (var i=0; i 2) splitGroup(group); } } } // Second pass -- generate dissolved shapes // function dissolveSecondPass(segments, shapes, getGroupId) { var dissolveShapes = []; segments.forEach(procSegment); return dissolveShapes; // @obj is an arc instance function procSegment(obj) { if (obj.used) return; var match = findDissolveArc(obj); if (!match) buildRing(obj); } function addRing(arcs, i) { if (i in dissolveShapes === false) { dissolveShapes[i] = []; } dissolveShapes[i].push(arcs); } // Generate a dissolved ring // @firstArc the first arc instance in the ring // function buildRing(firstArc) { var newArcs = [firstArc.arcId], nextArc = getNextArc(firstArc); firstArc.used = true; while (nextArc && nextArc != firstArc) { newArcs.push(nextArc.arcId); nextArc.used = true; nextArc = getNextArc(nextArc); if (nextArc && nextArc != firstArc && nextArc.used) error("buildRing() topology error"); } if (!nextArc) error("buildRing() traversal error"); firstArc.used = true; addRing(newArcs, getGroupId(firstArc.shapeId)); } // Get the next arc in a dissolved polygon ring // @obj an undissolvable arc instance // function getNextArc(obj, depth) { var next = getNextSegment(obj, segments, shapes), match; depth = depth || 0; if (next != obj) { match = findDissolveArc(next); if (match) { if (depth > 100) { error ('[dissolve] deep recursion -- unhandled topology problem'); } // if (match.part.arcs.length == 1) { if (shapes[match.shapeId][match.partId].length == 1) { // case: @obj has an island inclusion -- keep traversing @obj // TODO: test case if @next is first arc in the ring next = getNextArc(next, depth + 1); } else { next = getNextArc(match, depth + 1); } } } return next; } // Look for an arc instance that can be dissolved with segment @obj // (must be going the opposite direction and have same dissolve key, etc) // Return matching segment or null if no match // 
function findDissolveArc(obj) { var dissolveId = getGroupId(obj.shapeId), // obj.shape.dissolveKey, match, matchId; matchId = utils.find(obj.group, function(i) { var a = obj, b = segments[i]; if (a == b || b.used || getGroupId(b.shapeId) !== dissolveId || // don't prevent rings from dissolving with themselves (risky?) // a.shapeId == b.shapeId && a.partId == b.partId || a.arcId != ~b.arcId) return false; return true; }); match = matchId === null ? null : segments[matchId]; return match; } } function getNextSegment(seg, segments, shapes) { return getSegmentByOffs(seg, segments, shapes, 1); } function getPrevSegment(seg, segments, shapes) { return getSegmentByOffs(seg, segments, shapes, -1); } function getSegmentByOffs(seg, segments, shapes, offs) { var arcs = shapes[seg.shapeId][seg.partId], partLen = arcs.length, nextOffs = (seg.i + offs) % partLen, nextSeg; if (nextOffs < 0) nextOffs += partLen; nextSeg = segments[seg.segId - seg.i + nextOffs]; if (!nextSeg || nextSeg.shapeId != seg.shapeId) error("index error"); return nextSeg; } // Generate a dissolved layer // @opts.field (optional) name of data field (dissolves all if falsy) // @opts.sum-fields (Array) (optional) // @opts.copy-fields (Array) (optional) // api.dissolve = function(lyr, arcs, o) { var opts = o || {}, getGroupId = MapShaper.getCategoryClassifier(opts.field, lyr.data), dissolveShapes = null, dissolveData = null, lyr2; if (lyr.geometry_type == 'polygon') { dissolveShapes = dissolvePolygonGeometry(lyr.shapes, getGroupId); } else if (lyr.geometry_type == 'point') { dissolveShapes = dissolvePointLayerGeometry(lyr, getGroupId, opts); } else if (lyr.geometry_type) { stop("[dissolve] Only point and polygon geometries can be dissolved"); } if (lyr.data) { dissolveData = MapShaper.aggregateDataRecords(lyr.data.getRecords(), getGroupId, opts); // replace missing shapes with nulls for (var i=0, n=dissolveData.length; i 0 ? 
dissolved : null; }; }; // TODO: to prevent invalid holes, // could erase the holes from the space-enclosing rings. MapShaper.appendHolestoRings = function(cw, ccw) { for (var i=0, n=ccw.length; i