[
  {
    "path": ".gitattributes",
    "content": "\n*.gz filter=lfs diff=lfs merge=lfs -text\n*.gif filter=lfs diff=lfs merge=lfs -text\n*.json filter=lfs diff=lfs merge=lfs -text\n"
  },
  {
    "path": ".gitignore",
    "content": "/RUNNING_PID\n/logs/\nproject/project/\nproject/target/\ntarget/\n.idea\n.tmp\n"
  },
  {
    "path": ".travis.yml",
    "content": "language: scala\nscala:\n  - 2.10.6\n  - 2.11.7\njdk:\n  - oraclejdk8\n  - oraclejdk7\n  - openjdk7\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. 
We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2015 Forest Fang\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n\n===================================\n\nThis project also contains code from TSne.jl and d3.js.\nLicense can be found at: \n  https://github.com/lejon/TSne.jl/blob/master/LICENSE.md \nand \n  https://github.com/mbostock/d3/blob/master/LICENSE\nrespectively.\n"
  },
  {
    "path": "README.md",
    "content": "# spark-tsne\n\n[![Join the chat at https://gitter.im/saurfang/spark-tsne](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/saurfang/spark-tsne?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Build Status](https://travis-ci.org/erwinvaneijk/spark-tsne.svg?branch=master)](https://travis-ci.org/erwinvaneijk/spark-tsne)\nDistributed [t-SNE](http://lvdmaaten.github.io/tsne/) with Apache Spark. WIP...\n\nt-SNE is a dimension reduction technique that is particularly good for visualizing high\ndimensional data. This is an attempt to implement this algorithm using Spark to leverage\ndistributed computing power.\n\nThe project is still in progress of replicating reference implementations from the original\npapers. Spark specific optimizations will be the next goal once the correctness is verified.\n\nCurrently I'm showcasing this using the standard [MNIST](http://yann.lecun.com/exdb/mnist/)\nhandwriting recognition dataset. I have created a [WebGL player](https://saurfang.github.io/spark-tsne-demo/tsne-pixi.html)\n(built using [pixi.js](https://github.com/pixijs/pixi.js)) to visualize the inner workings\nas well as the final results of t-SNE. If a WebGL is unavailable for you, you may checkout\nthe [d3.js player](https://saurfang.github.io/spark-tsne-demo/tsne.html) instead.\n\n![](data/mnist/tsne.gif)\n\n## Credits\n\n- [t-SNE Julia implementation](https://github.com/lejon/TSne.jl)\n- [Barnes-Hut t-SNE](https://github.com/lvdmaaten/bhtsne/)\n"
  },
  {
    "path": "build.sbt",
    "content": "import Common._\n\nlazy val root = Project(\"spark-tsne\", file(\".\")).\n  settings(commonSettings: _*).\n  aggregate(core, vis, examples)\n\nlazy val core = tsneProject(\"spark-tsne-core\").\n  settings(Dependencies.core)\n\nlazy val vis = tsneProject(\"spark-tsne-player\").\n  dependsOn(core)\n\nlazy val examples = tsneProject(\"spark-tsne-examples\").\n  dependsOn(core, vis).\n  settings(fork in run := true).\n  settings(Dependencies.core).\n  settings(SparkSubmit.settings: _*)\n"
  },
  {
    "path": "data/mnist/tsne.R",
    "content": "library(dplyr)\nlibrary(ggplot2)\nlibrary(animation)\nlibrary(jsonlite)\n\nresultFiles <- list.files(\"~/GitHub/spark-tsne/.tmp/MNIST/\", \"result\", full.names = TRUE)\nresults <- lapply(resultFiles, function(file) { read.csv(file, FALSE) })\nresultsCombined <- lapply(1:length(results), function(i) {\n  result <- results[[i]]\n  names(result)  <- c(\"label\", \"x\", \"y\")\n  mutate(result, i = i, key = row_number())\n}) %>%\n  rbind_all()\n\n#### save results as json for viewer ####\niterations <- c(1:99, seq(100, length(results), 5)) # assume 100 early exaggeration here\nresultsByObs <- filter(resultsCombined, i %in% iterations) %>%\n  group_by(key) %>%\n#   do({\n#     list(key = unbox(.$key[1]), label = unbox(.$label[1]),\n#          # assume order will preserve\n#          pos = select(., x, y)) %>%\n#     data_frame\n#   })\n  do(key = unbox(.$key[1]),\n     label = unbox(.$label[1]),\n     pos = select(., x, y))\nwrite(toJSON(list(iterations = iterations, data = resultsByObs)), \"mnist.json\")\n\n#### save plot as animated gif ####\ncomputeLimit <- function(f, cumf) {\n  cumf(lapply(results, f))\n}\n\nxmax <- computeLimit(. %>% {max(abs(.$V2))}, cummax)\nymax <- computeLimit(. %>% {max(abs(.$V3))}, cummax)\n\nplotResult <- function(i) {\n  ggplot(results[[i]]) +\n    aes(V2, V3, color = as.factor(V1), label = V1) +\n    #geom_point() +\n    geom_text() +\n    xlim(-xmax[i], xmax[i]) +\n    ylim(-ymax[i], ymax[i])\n}\n\ntraceAnimate <- function(n = length(results), step = 1) {\n  lapply(seq(1, n, step), function(i) {\n    print(plotResult(i))\n  })\n}\n\nfile.remove(\"tsne.gif\")\nsaveGIF(traceAnimate(step = 5), interval = 0.05, movie.name = \"tsne.gif\", loop = 1)\n"
  },
  {
    "path": "project/Common.scala",
    "content": "import sbt._\nimport Keys._\nimport com.typesafe.sbt.GitPlugin.autoImport._\n\nimport scala.language.experimental.macros\nimport scala.reflect.macros.Context\n\nobject Common {\n  val commonSettings = Seq(\n    organization in ThisBuild := \"com.github.saurfang\",\n    javacOptions ++= Seq(\"-source\", \"1.7\", \"-target\", \"1.7\"),\n    scalacOptions ++= Seq(\"-target:jvm-1.7\", \"-deprecation\", \"-feature\"),\n    //git.useGitDescribe := true,\n    git.baseVersion := \"0.0.1\",\n    parallelExecution in test := false,\n    updateOptions := updateOptions.value.withCachedResolution(true)\n  )\n\n  def tsneProject(path: String): Project = macro tsneProjectMacroImpl\n\n  def tsneProjectMacroImpl(c: Context)(path: c.Expr[String]) = {\n    import c.universe._\n    reify {\n      (Project.projectMacroImpl(c).splice in file(path.splice)).\n        settings(name := path.splice).\n        settings(Dependencies.Versions).\n        settings(commonSettings: _*)\n    }\n  }\n}"
  },
  {
    "path": "project/Dependencies.scala",
    "content": "import sbt._\nimport Keys._\n\nobject Dependencies {\n  val Versions = Seq(\n    crossScalaVersions := Seq(\"2.11.8\", \"2.10.5\"),\n    scalaVersion := crossScalaVersions.value.head\n  )\n\n  object Compile {\n    val spark = \"org.apache.spark\" %% \"spark-mllib\" % \"2.1.0\" % \"provided\"\n    val breeze_natives = \"org.scalanlp\" %% \"breeze-natives\" % \"0.11.2\" % \"provided\"\n    val logging = Seq(\n      \"org.slf4j\" % \"slf4j-api\" % \"1.7.16\",\n      \"org.slf4j\" % \"slf4j-log4j12\" % \"1.7.16\")\n\n    object Test {\n      val scalatest = \"org.scalatest\" %% \"scalatest\" % \"3.0.0\" % \"test\"\n    }\n  }\n\n  import Compile._\n  val l = libraryDependencies\n\n  val core = l ++= Seq(spark, breeze_natives, Test.scalatest) ++ logging\n}\n"
  },
  {
    "path": "project/SparkSubmit.scala",
    "content": "import sbtsparksubmit.SparkSubmitPlugin.autoImport._\n\nobject SparkSubmit {\n  lazy val settings =\n    SparkSubmitSetting(\"sparkMNIST\",\n      Seq(\n        \"--master\", \"local[3]\",\n        \"--class\", \"com.github.saurfang.spark.tsne.examples.MNIST\"\n      )\n    )\n}\n"
  },
  {
    "path": "project/build.properties",
    "content": "sbt.version=0.13.13\n"
  },
  {
    "path": "project/plugins.sbt",
    "content": "addSbtPlugin(\"com.github.gseitz\" % \"sbt-release\" % \"1.0.0\")\n\naddSbtPlugin(\"me.lessis\" % \"bintray-sbt\" % \"0.2.1\")\n\naddSbtPlugin(\"com.typesafe.sbt\" % \"sbt-git\" % \"0.8.4\")\n\naddSbtPlugin(\"com.eed3si9n\" % \"sbt-assembly\" % \"0.13.0\")\n\naddSbtPlugin(\"com.github.saurfang\" % \"sbt-spark-submit\" % \"0.0.4\")\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/TSNEGradient.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport breeze.linalg._\nimport breeze.numerics._\nimport com.github.saurfang.spark.tsne.tree.SPTree\nimport org.slf4j.LoggerFactory\n\nobject TSNEGradient {\n  def logger = LoggerFactory.getLogger(TSNEGradient.getClass)\n\n  /**\n    * Compute the numerator from the matrix Y\n    *\n    * @param idx the index in the matrix to use.\n    * @param Y the matrix to analyze\n    * @return the numerator\n    */\n  def computeNumerator(Y: DenseMatrix[Double], idx: Int *): DenseMatrix[Double] = {\n    // Y_sum = ||Y_i||^2\n    val sumY = sum(pow(Y, 2).apply(*, ::)) // n * 1\n    val subY = Y(idx, ::).toDenseMatrix // k * 1\n    val y1: DenseMatrix[Double] = Y * (-2.0 :* subY.t) // n * k\n    val num: DenseMatrix[Double] = (y1(::, *) + sumY).t // k * n\n    num := 1.0 :/ (1.0 :+ (num(::, *) + sumY(idx).toDenseVector)) // k * n\n\n    idx.indices.foreach(i => num.update(i, idx(i), 0.0)) // num(i, i) = 0\n\n    num\n  }\n\n  /**\n   * Compute the TSNE Gradient at i. Update the gradient through dY then return costs attributed at i.\n   *\n   * @param data data point for row i by list of pair of (j, p_ij) and 0 <= j < n\n   * @param Y current Y [n * 2]\n   * @param totalNum the common numerator that captures the t-distribution of Y\n   * @param dY gradient of Y\n   * @return loss attributed to row i\n   */\n  def compute(\n               data: Array[(Int, Iterable[(Int, Double)])],\n               Y: DenseMatrix[Double],\n               num: DenseMatrix[Double],\n               totalNum: Double,\n               dY: DenseMatrix[Double],\n               exaggeration: Boolean): Double = {\n    // q = (1 + ||Y_i - Y_j||^2)^-1 / sum(1 + ||Y_k - Y_l||^2)^-1\n    val q: DenseMatrix[Double] = num / totalNum\n    q.foreachPair{case ((i, j), v) => q.update(i, j, math.max(v, 1e-12))}\n\n    // q = q - p\n    val loss = data.zipWithIndex.flatMap {\n      case ((_, itr), i) =>\n        itr.map{\n          case (j, p) =>\n            val exaggeratedP = if(exaggeration) p * 4 else p\n            val qij = q(i, j)\n            val l = exaggeratedP * math.log(exaggeratedP / qij)\n            q.update(i, j,  qij - exaggeratedP)\n            if(l.isNaN) 0.0 else l\n        }\n    }.sum\n\n    // l = [ (p_ij - q_ij) * (1 + ||Y_i - Y_j||^2)^-1 ]\n    q :*= -num\n    // l_sum = [0 0 ... sum(l) ... 
0]\n    sum(q(*, ::)).foreachPair{ case (i, v) => q.update(i, data(i)._1, q(i, data(i)._1) - v) }\n\n    // dY_i = -4 * (l - l_sum) * Y\n    val dYi: DenseMatrix[Double] = -4.0 :* (q * Y)\n    data.map(_._1).zipWithIndex.foreach{\n      case (i, idx) => dY(i, ::) := dYi(idx, ::)\n    }\n\n    loss\n  }\n\n  /** BH Tree related functions **/\n\n  /**\n   *\n   * @param data array of (row_id, Seq(col_id), Vector(P_ij))\n   * @param Y matrix\n   * @param posF positive forces\n   */\n  def computeEdgeForces(data: Array[(Int, Seq[Int], DenseVector[Double])],\n              Y: DenseMatrix[Double],\n              posF: DenseMatrix[Double]): Unit = {\n    data.foreach {\n      case (i, cols, vec) =>\n        // k x D - 1 x D  => k x D\n        val diff = Y(cols, ::).toDenseMatrix.apply(*, ::) - Y(i, ::).t\n        // k x D => k x 1\n        val qZ = 1.0 :+ sum(pow(diff, 2).apply(*, ::))\n        posF(i, ::) := (vec :/ qZ).t * (-diff)\n    }\n  }\n\n  def computeNonEdgeForces(tree: SPTree,\n                           Y: DenseMatrix[Double],\n                           theta: Double,\n                           negF: DenseMatrix[Double],\n                           idx: Int *): Double = {\n    idx.foldLeft(0.0)((acc, i) => acc + computeNonEdgeForce(tree, Y(i, ::).t, theta, negF, i))\n  }\n\n  /**\n   * Calcualte negative forces using BH approximation\n   *\n   * @param tree SPTree used for approximation\n   * @param y y_i\n   * @param theta threshold for correctness / speed\n   * @param negF negative forces\n   * @param i row\n   * @return sum of Q\n   */\n  private def computeNonEdgeForce(tree: SPTree,\n                                  y: DenseVector[Double],\n                                  theta: Double,\n                                  negF: DenseMatrix[Double],\n                                  i: Int): Double = {\n    import tree._\n    if(getCount == 0 || (isLeaf && center.equals(y))) {\n      0.0\n    } else {\n      val diff = y - center\n      val diffSq = sum(pow(diff, 2))\n      if(isLeaf || radiusSq / diffSq < theta) {\n        val qZ = 1 / (1 + diffSq)\n        val nqZ = getCount * qZ\n        negF(i, ::) :+= (nqZ * qZ * diff).t\n        nqZ\n      } else {\n        children.foldLeft(0.0)((acc, child) => acc + computeNonEdgeForce(child, y, theta, negF, i))\n      }\n    }\n  }\n\n  def computeLoss(data: Array[(Int, Seq[Int], DenseVector[Double])],\n                  Y: DenseMatrix[Double],\n                  sumQ: Double): Double = {\n    data.foldLeft(0.0){\n      case (acc, (i, cols, vec)) =>\n        val diff = Y(cols, ::).toDenseMatrix.apply(*, ::) - Y(i, ::).t\n        val diffSq =  sum(pow(diff, 2).apply(*, ::))\n        val Q = (1.0 :/ (1.0 :+ diffSq)) :/ sumQ\n        sum(vec :* breeze.numerics.log(max(vec, 1e-12) :/ max(Q, 1e-12)))\n    }\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/TSNEHelper.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport breeze.linalg._\nimport breeze.stats._\nimport org.apache.spark.mllib.linalg.distributed.CoordinateMatrix\nimport org.apache.spark.rdd.RDD\n\nobject TSNEHelper {\n  // p_ij = (p_{i|j} + p_{j|i}) / 2n\n  def computeP(p_ji: CoordinateMatrix, n: Int): RDD[(Int, Iterable[(Int, Double)])] = {\n    p_ji.entries\n      .flatMap(e => Seq(\n      ((e.i.toInt, e.j.toInt), e.value),\n      ((e.j.toInt, e.i.toInt), e.value)\n    ))\n      .reduceByKey(_ + _) // p + p'\n      .map{case ((i, j), v) => (i, (j, math.max(v / 2 / n, 1e-12))) } // p / 2n\n      .groupByKey()\n  }\n\n  /**\n   * Update Y via gradient dY\n   * @param Y current Y\n   * @param dY gradient dY\n   * @param iY stored y_i - y_{i-1}\n   * @param gains adaptive learning rates\n   * @param iteration n\n   * @param param [[TSNEParam]]\n   * @return\n   */\n  def update(Y: DenseMatrix[Double],\n             dY: DenseMatrix[Double],\n             iY: DenseMatrix[Double],\n             gains: DenseMatrix[Double],\n             iteration: Int,\n             param: TSNEParam): DenseMatrix[Double] = {\n    import param._\n    val momentum = if (iteration <= t_momentum) initial_momentum else final_momentum\n    gains.foreachPair {\n      case ((i, j), old_gain) =>\n        val new_gain = math.max(min_gain,\n          if ((dY(i, j) > 0.0) != (iY(i, j) > 0.0))\n            old_gain + 0.2\n          else\n            old_gain * 0.8\n        )\n        gains.update(i, j, new_gain)\n\n        val new_iY = momentum * iY(i, j) - eta * new_gain * dY(i, j)\n        iY.update(i, j, new_iY)\n\n        Y.update(i, j, Y(i, j) + new_iY) // Y += iY\n    }\n    val t_Y: DenseVector[Double] = mean(Y(::, *)).t\n    val y_sub = Y(*, ::)\n    Y := y_sub - t_Y\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/TSNEParam.scala",
    "content": "package com.github.saurfang.spark.tsne\n\ncase class TSNEParam(\n                      early_exaggeration: Int = 100,\n                      exaggeration_factor: Double = 4.0,\n                      t_momentum: Int = 25,\n                      initial_momentum: Double = 0.5,\n                      final_momentum: Double = 0.8,\n                      eta: Double = 500.0,\n                      min_gain: Double = 0.01\n                      )\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/X2P.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport breeze.linalg.DenseVector\nimport org.apache.spark.mllib.X2PHelper._\nimport org.apache.spark.mllib.linalg.Vectors\nimport org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry, RowMatrix}\nimport org.apache.spark.mllib.rdd.MLPairRDDFunctions._\nimport org.slf4j.LoggerFactory\n\nobject X2P {\n\n  private def logger = LoggerFactory.getLogger(X2P.getClass)\n\n  def apply(x: RowMatrix, tol: Double = 1e-5, perplexity: Double = 30.0): CoordinateMatrix = {\n    require(tol >= 0, \"Tolerance must be non-negative\")\n    require(perplexity > 0, \"Perplexity must be positive\")\n\n    val mu = (3 * perplexity).toInt //TODO: Expose this as parameter\n    val logU = Math.log(perplexity)\n    val norms = x.rows.map(Vectors.norm(_, 2.0))\n    norms.persist()\n    val rowsWithNorm = x.rows.zip(norms).map{ case (v, norm) => VectorWithNorm(v, norm) }\n    val neighbors = rowsWithNorm.zipWithIndex()\n      .cartesian(rowsWithNorm.zipWithIndex())\n      .flatMap {\n      case ((u, i), (v, j)) =>\n        if(i < j) {\n          val dist = fastSquaredDistance(u, v)\n          Seq((i, (j, dist)), (j, (i, dist)))\n        } else Seq.empty\n    }\n      .topByKey(mu)(Ordering.by(e => -e._2))\n\n    val p_betas =\n      neighbors.map {\n        case (i, arr) =>\n          var betamin = Double.NegativeInfinity\n          var betamax = Double.PositiveInfinity\n          var beta = 1.0\n\n          val d = DenseVector(arr.map(_._2))\n          var (h, p) = Hbeta(d, beta)\n\n          //logInfo(\"data was \" + d.toArray.toList)\n          //logInfo(\"array P was \" + p.toList)\n\n          // Evaluate whether the perplexity is within tolerance\n          def Hdiff = h - logU\n          var tries = 0\n          while (Math.abs(Hdiff) > tol && tries < 50) {\n            //If not, increase or decrease precision\n            if (Hdiff > 0) {\n              betamin = beta\n              beta = if (betamax.isInfinite) beta * 2 else (beta + betamax) / 2\n            } else {\n              betamax = beta\n              beta = if (betamin.isInfinite) beta / 2 else (beta + betamin) / 2\n            }\n\n            // Recompute the values\n            val HP = Hbeta(d, beta)\n            h = HP._1\n            p = HP._2\n            tries = tries + 1\n          }\n\n          //logInfo(\"array P is \" + p.toList)\n\n          (arr.map(_._1).zip(p.toArray).map { case (j, v) => MatrixEntry(i, j, v) }, beta)\n      }\n\n    logger.info(\"Mean value of sigma: \" + p_betas.map(x => math.sqrt(1 / x._2)).mean)\n    new CoordinateMatrix(p_betas.flatMap(_._1))\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/impl/BHTSNE.scala",
    "content": "package com.github.saurfang.spark.tsne.impl\n\nimport breeze.linalg._\nimport breeze.stats.distributions.Rand\nimport com.github.saurfang.spark.tsne.tree.SPTree\nimport com.github.saurfang.spark.tsne.{TSNEGradient, TSNEHelper, TSNEParam, X2P}\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.apache.spark.storage.StorageLevel\nimport org.slf4j.LoggerFactory\n\nimport scala.util.Random\n\nobject BHTSNE {\n  private def logger = LoggerFactory.getLogger(BHTSNE.getClass)\n\n  def tsne(\n            input: RowMatrix,\n            noDims: Int = 2,\n            maxIterations: Int = 1000,\n            perplexity: Double = 30,\n            theta: Double = 0.5,\n            reportLoss: Int => Boolean = {i => i % 10 == 0},\n            callback: (Int, DenseMatrix[Double], Option[Double]) => Unit = {case _ => },\n            seed: Long = Random.nextLong()\n            ): DenseMatrix[Double] = {\n    if(input.rows.getStorageLevel == StorageLevel.NONE) {\n      logger.warn(\"Input is not persisted and performance could be bad\")\n    }\n\n    Rand.generator.setSeed(seed)\n\n    val tsneParam = TSNEParam()\n    import tsneParam._\n\n    val n = input.numRows().toInt\n    val Y: DenseMatrix[Double] = DenseMatrix.rand(n, noDims, Rand.gaussian(0, 1)) :/ 1e4\n    val iY = DenseMatrix.zeros[Double](n, noDims)\n    val gains = DenseMatrix.ones[Double](n, noDims)\n\n    // approximate p_{j|i}\n    val p_ji = X2P(input, 1e-5, perplexity)\n    val P = TSNEHelper.computeP(p_ji, n).glom()\n      .map(rows => rows.map {\n      case (i, data) =>\n        (i, data.map(_._1).toSeq, DenseVector(data.map(_._2 * exaggeration_factor).toArray))\n    })\n      .cache()\n\n      var iteration = 1\n      while(iteration <= maxIterations) {\n        val bcY = P.context.broadcast(Y)\n        val bcTree = P.context.broadcast(SPTree(Y))\n\n        val initialValue = (DenseMatrix.zeros[Double](n, noDims), DenseMatrix.zeros[Double](n, noDims), 0.0)\n        val (posF, negF, sumQ) = P.treeAggregate(initialValue)(\n          seqOp = (c, v) => {\n            // c: (pos, neg, sumQ), v: Array[(i, Seq(j), vec(Distance))]\n            TSNEGradient.computeEdgeForces(v, bcY.value, c._1)\n            val q = TSNEGradient.computeNonEdgeForces(bcTree.value, bcY.value, theta, c._2, v.map(_._1): _*)\n            (c._1, c._2, c._3 + q)\n          },\n          combOp = (c1, c2) => {\n            // c: (grad, loss)\n            (c1._1 + c2._1, c1._2 + c2._2, c1._3 + c2._3)\n          })\n        val dY: DenseMatrix[Double] = posF :- (negF :/ sumQ)\n\n        TSNEHelper.update(Y, dY, iY, gains, iteration, tsneParam)\n\n        if(reportLoss(iteration)) {\n          val loss = P.treeAggregate(0.0)(\n            seqOp = (c, v) => {\n              TSNEGradient.computeLoss(v, bcY.value, sumQ)\n            },\n            combOp = _ + _\n          )\n          logger.debug(s\"Iteration $iteration finished with $loss\")\n          callback(iteration, Y.copy, Some(loss))\n        } else {\n          logger.debug(s\"Iteration $iteration finished\")\n          callback(iteration, Y.copy, None)\n        }\n\n        bcY.destroy()\n        bcTree.destroy()\n\n        //undo early exaggeration\n        if(iteration == early_exaggeration) {\n          P.foreach {\n            rows => rows.foreach {\n              case (_, _, vec) => vec.foreachPair { case (i, v) => vec.update(i, v / exaggeration_factor) }\n            }\n          }\n        }\n\n        iteration += 1\n      }\n\n    Y\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/impl/LBFGSTSNE.scala",
    "content": "package com.github.saurfang.spark.tsne.impl\n\nimport breeze.linalg._\nimport breeze.optimize.{CachedDiffFunction, DiffFunction, LBFGS}\nimport breeze.stats.distributions.Rand\nimport com.github.saurfang.spark.tsne.{TSNEGradient, X2P}\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.apache.spark.rdd.RDD\nimport org.apache.spark.storage.StorageLevel\nimport org.slf4j.LoggerFactory\n\nimport scala.util.Random\n\n/**\n * TODO: This doesn't work at all (yet or ever).\n */\nobject LBFGSTSNE {\n  private def logger = LoggerFactory.getLogger(LBFGSTSNE.getClass)\n\n  def tsne(\n            input: RowMatrix,\n            noDims: Int = 2,\n            maxNumIterations: Int = 1000,\n            numCorrections: Int = 10,\n            convergenceTol: Double = 1e-4,\n            perplexity: Double = 30,\n            seed: Long = Random.nextLong()): DenseMatrix[Double] = {\n    if(input.rows.getStorageLevel == StorageLevel.NONE) {\n      logger.warn(\"Input is not persisted and performance could be bad\")\n    }\n\n    Rand.generator.setSeed(seed)\n\n    val n = input.numRows().toInt\n    val early_exaggeration = 100\n    val t_momentum = 250\n    val initial_momentum = 0.5\n    val final_momentum = 0.8\n    val eta = 500.0\n    val min_gain = 0.01\n\n    val Y: DenseMatrix[Double] = DenseMatrix.rand(n, noDims, Rand.gaussian) //:* .0001\n    val iY = DenseMatrix.zeros[Double](n, noDims)\n    val gains = DenseMatrix.ones[Double](n, noDims)\n\n    // approximate p_{j|i}\n    val p_ji = X2P(input, 1e-5, perplexity)\n    //logInfo(p_ji.toRowMatrix().rows.collect().toList.toString)\n    // p_ij = (p_{i|j} + p_{j|i}) / 2n\n    val P = p_ji.transpose().entries.union(p_ji.entries)\n      .map(e => ((e.i.toInt, e.j.toInt), e.value))\n      .reduceByKey(_ + _)\n      .map{case ((i, j), v) => (i, (j, v / 2 / n)) }\n      .groupByKey()\n      .glom()\n      .cache()\n\n      var iteration = 1\n\n      {\n        val costFun = new CostFun(P, n, noDims, true)\n        val lbfgs = new LBFGS[DenseVector[Double]](maxNumIterations, numCorrections, convergenceTol)\n        val states = lbfgs.iterations(new CachedDiffFunction(costFun), new DenseVector(Y.data))\n\n        while (states.hasNext) {\n          val state = states.next()\n          val loss = state.value\n          //logInfo(state.convergedReason.get.toString)\n          logger.debug(s\"Iteration $iteration finished with $loss\")\n\n          Y := asDenseMatrix(state.x, n, noDims)\n          //subscriber.onNext((iteration, Y.copy, Some(loss)))\n          iteration += 1\n        }\n      }\n\n      {\n        val costFun = new CostFun(P, n, noDims, false)\n        val lbfgs = new LBFGS[DenseVector[Double]](maxNumIterations, numCorrections, convergenceTol)\n        val states = lbfgs.iterations(new CachedDiffFunction(costFun), new DenseVector(Y.data))\n\n        while (states.hasNext) {\n          val state = states.next()\n          val loss = state.value\n          //logInfo(state.convergedReason.get.toString)\n          logger.debug(s\"Iteration $iteration finished with $loss\")\n\n          Y := asDenseMatrix(state.x, n, noDims)\n          //subscriber.onNext((iteration, Y.copy, Some(loss)))\n          iteration += 1\n        }\n      }\n\n      Y\n  }\n\n  private[this] def asDenseMatrix(v: DenseVector[Double], n: Int, noDims: Int) = {\n    v.asDenseMatrix.reshape(n, noDims)\n  }\n\n  private class CostFun(\n                         P: RDD[Array[(Int, Iterable[(Int, Double)])]],\n                         n: Int,\n         
                noDims: Int,\n                         exaggeration: Boolean) extends DiffFunction[DenseVector[Double]] {\n\n    override def calculate(weights: DenseVector[Double]): (Double, DenseVector[Double]) = {\n      val bcY = P.context.broadcast(asDenseMatrix(weights, n, noDims))\n      val bcExaggeration = P.context.broadcast(exaggeration)\n\n      val numerator = P.map{ arr => TSNEGradient.computeNumerator(bcY.value, arr.map(_._1): _*) }.cache()\n      val bcNumerator = P.context.broadcast({\n        numerator.treeAggregate(0.0)(seqOp = (x, v) => x + sum(v), combOp = _ + _)\n      })\n\n      val (dY, loss) = P.zip(numerator).treeAggregate((DenseMatrix.zeros[Double](n, noDims), 0.0))(\n        seqOp = (c, v) => {\n          // c: (grad, loss), v: (Array[(i, Iterable(j, Distance))], numerator)\n          // TODO: See if we can include early_exaggeration\n          val l = TSNEGradient.compute(v._1, bcY.value, v._2, bcNumerator.value, c._1, bcExaggeration.value)\n          (c._1, c._2 + l)\n        },\n        combOp = (c1, c2) => {\n          // c: (grad, loss)\n          (c1._1 += c2._1, c1._2 + c2._2)\n        })\n\n      numerator.unpersist()\n\n      (loss, new DenseVector(dY.data))\n    }\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/impl/SimpleTSNE.scala",
    "content": "package com.github.saurfang.spark.tsne.impl\n\nimport breeze.linalg._\nimport breeze.stats.distributions.Rand\nimport com.github.saurfang.spark.tsne.{TSNEGradient, TSNEHelper, TSNEParam, X2P}\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.apache.spark.storage.StorageLevel\nimport org.slf4j.LoggerFactory\n\nimport scala.util.Random\n\nobject SimpleTSNE {\n  private def logger = LoggerFactory.getLogger(SimpleTSNE.getClass)\n\n  def tsne(\n            input: RowMatrix,\n            noDims: Int = 2,\n            maxIterations: Int = 1000,\n            perplexity: Double = 30,\n            callback: (Int, DenseMatrix[Double], Option[Double]) => Unit = {case _ => },\n            seed: Long = Random.nextLong()): DenseMatrix[Double] = {\n    if(input.rows.getStorageLevel == StorageLevel.NONE) {\n      logger.warn(\"Input is not persisted and performance could be bad\")\n    }\n\n    Rand.generator.setSeed(seed)\n\n    val tsneParam = TSNEParam()\n    import tsneParam._\n\n    val n = input.numRows().toInt\n    val Y: DenseMatrix[Double] = DenseMatrix.rand(n, noDims, Rand.gaussian(0, 1))\n    val iY = DenseMatrix.zeros[Double](n, noDims)\n    val gains = DenseMatrix.ones[Double](n, noDims)\n\n    // approximate p_{j|i}\n    val p_ji = X2P(input, 1e-5, perplexity)\n    val P = TSNEHelper.computeP(p_ji, n).glom().cache()\n\n      var iteration = 1\n      while(iteration <= maxIterations) {\n        val bcY = P.context.broadcast(Y)\n\n        val numerator = P.map{ arr => TSNEGradient.computeNumerator(bcY.value, arr.map(_._1): _*) }.cache()\n        val bcNumerator = P.context.broadcast({\n          numerator.treeAggregate(0.0)(seqOp = (x, v) => x + sum(v), combOp = _ + _)\n        })\n\n        val (dY, loss) = P.zip(numerator).treeAggregate((DenseMatrix.zeros[Double](n, noDims), 0.0))(\n          seqOp = (c, v) => {\n            // c: (grad, loss), v: (Array[(i, Iterable(j, Distance))], numerator)\n            val l = TSNEGradient.compute(v._1, bcY.value, v._2, bcNumerator.value, c._1, iteration <= early_exaggeration)\n            (c._1, c._2 + l)\n          },\n          combOp = (c1, c2) => {\n            // c: (grad, loss)\n            (c1._1 + c2._1, c1._2 + c2._2)\n          })\n\n        bcY.destroy()\n        bcNumerator.destroy()\n        numerator.unpersist()\n\n        TSNEHelper.update(Y, dY, iY, gains, iteration, tsneParam)\n\n        logger.debug(s\"Iteration $iteration finished with $loss\")\n        callback(iteration, Y.copy, Some(loss))\n        iteration += 1\n      }\n      Y\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/main/scala/com/github/saurfang/spark/tsne/tree/SPTree.scala",
    "content": "package com.github.saurfang.spark.tsne.tree\n\nimport breeze.linalg._\nimport breeze.numerics._\n\nimport scala.annotation.tailrec\n\n\nclass SPTree private[tree](val dimension: Int,\n              val corner: DenseVector[Double],\n              val width: DenseVector[Double]) extends Serializable {\n  private[this] val childWidth: DenseVector[Double] = width :/ 2.0\n  lazy val radiusSq: Double = sum(pow(width, 2))\n  private[tree] val totalMass: DenseVector[Double] = DenseVector.zeros(dimension)\n  private var count: Int = 0\n  private var leaf: Boolean = true\n  val center: DenseVector[Double] = DenseVector.zeros(dimension)\n\n  lazy val children: Array[SPTree] = {\n    (0 until pow(2, dimension)).toArray.map {\n      i =>\n        val bits = DenseVector(s\"%0${dimension}d\".format(i.toBinaryString.toInt).toArray.map(_.toDouble - '0'.toDouble))\n        val childCorner: DenseVector[Double] = corner + (bits :* childWidth)\n        new SPTree(dimension, childCorner, childWidth)\n    }\n  }\n\n  final def insert(vector: DenseVector[Double], finalize: Boolean = false): SPTree = {\n    totalMass += vector\n    count += 1\n\n    if(leaf) {\n      if(count == 1) { // first to leaf\n        center := vector\n      } else if(!vector.equals(center)) {\n        (1 until count).foreach(_ => getCell(center).insert(center, finalize)) //subdivide\n        leaf = false\n      }\n    }\n\n    if(finalize) computeCenter(false)\n\n    if(leaf) this else getCell(vector).insert(vector, finalize)\n  }\n\n  def computeCenter(recursive: Boolean = true): Unit = {\n    if(count > 0) {\n      center := totalMass / count.toDouble\n      if(recursive) children.foreach(_.computeCenter())\n    }\n  }\n\n  def getCell(vector: DenseVector[Double]): SPTree = {\n    val idx = ((vector - corner) :/ childWidth).data\n    children(idx.foldLeft(0)((acc, i) => acc * 2 + min(max(i.ceil.toInt - 1, 0), 1)))\n  }\n\n  def getCount: Int = count\n\n  def isLeaf: Boolean = leaf\n}\n\nobject SPTree {\n  def apply(Y: DenseMatrix[Double]): SPTree = {\n    val d = Y.cols\n    val minMaxs = minMax(Y(::, *)).t\n    val mins = minMaxs.mapValues(_._1)\n    val maxs = minMaxs.mapValues(_._2)\n\n    val tree = new SPTree(Y.cols, mins, maxs - mins)\n\n    // insert points but wait till end to compute all centers\n    //Y(*, ::).foreach(tree.insert(_, finalize = false))\n    (0 until Y.rows).foreach(i => tree.insert(Y(i, ::).t, finalize = false))\n    // compute all center of mass\n    tree.computeCenter()\n\n    tree\n  }\n}"
  },
  {
    "path": "spark-tsne-core/src/main/scala/org/apache/spark/mllib/X2PHelper.scala",
    "content": "package org.apache.spark.mllib\n\nimport breeze.linalg._\nimport breeze.numerics._\nimport org.apache.spark.mllib.linalg.{Vector, Vectors}\nimport org.apache.spark.mllib.util.MLUtils\n\n\nobject X2PHelper {\n\n  case class VectorWithNorm(vector: Vector, norm: Double)\n\n  def fastSquaredDistance(v1: VectorWithNorm, v2: VectorWithNorm): Double = {\n    MLUtils.fastSquaredDistance(v1.vector, v1.norm, v2.vector, v2.norm)\n  }\n\n  def Hbeta(D: DenseVector[Double], beta: Double = 1.0) : (Double, DenseVector[Double]) = {\n    val P: DenseVector[Double] = exp(- D * beta)\n    val sumP = sum(P)\n    if(sumP == 0) {\n      (0.0, DenseVector.zeros(D.size))\n    }else {\n      val H = log(sumP) + (beta * sum(D :* P) / sumP)\n      (H, P / sumP)\n    }\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/test/scala/com/github/saurfang/spark/tsne/BugDemonstrationTest.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport org.apache.spark.mllib.linalg.{Vectors, Vector}\nimport org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}\nimport org.apache.spark.sql.SparkSession\nimport org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}\n\n/**\n  * This test demonstrates the bug introduced when upgrading the codebase to spark 2.1.\n  *\n  * For completeness and to check regressions, it's now added to the codebase.\n  *\n  * @author erwin.vaneijk@gmail.com\n  */\nclass BugDemonstrationTest extends FunSuite with Matchers with BeforeAndAfterAll {\n  private var sparkSession : SparkSession = _\n  override def beforeAll(): Unit = {\n    super.beforeAll()\n    sparkSession = SparkSession.builder().appName(\"BugTests\").master(\"local[2]\").getOrCreate()\n  }\n\n  override def afterAll(): Unit = {\n    super.afterAll()\n    sparkSession.stop()\n  }\n\n  test(\"This demonstrates a bug was fixed in tsne-spark 2.1\") {\n    val sc = sparkSession.sparkContext\n\n    val observations = sc.parallelize(\n      Seq(\n        Vectors.dense(1.0, 10.0, 100.0),\n        Vectors.dense(2.0, 20.0, 200.0),\n        Vectors.dense(3.0, 30.0, 300.0)\n      )\n    )\n\n    // Compute column summary statistics.\n    val summary: MultivariateStatisticalSummary = Statistics.colStats(observations)\n    val expectedMean = Vectors.dense(2.0,20.0,200.0)\n    val resultMean = summary.mean\n    assertEqualEnough(resultMean, expectedMean)\n    val expectedVariance = Vectors.dense(1.0,100.0,10000.0)\n    assertEqualEnough(summary.variance, expectedVariance)\n    val expectedNumNonZeros = Vectors.dense(3.0, 3.0, 3.0)\n    assertEqualEnough(summary.numNonzeros, expectedNumNonZeros)\n  }\n\n  private def assertEqualEnough(sample: Vector, expected: Vector): Unit = {\n    expected.toArray.zipWithIndex.foreach{ case(d: Double, i: Int) =>\n      sample(i) should be (d +- 1E-12)\n    }\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/test/scala/com/github/saurfang/spark/tsne/TSNEGradientTest.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport breeze.linalg._\nimport org.scalatest.{FunSuite, Matchers}\n\n/**\n * Created by forest on 7/17/15.\n */\nclass TSNEGradientTest extends FunSuite with Matchers {\n  test(\"computeNumerator should compute numerator for sub indices\") {\n    val Y = DenseMatrix.create(3, 2, (1 to 6).map(_.toDouble).toArray)\n    println(Y)\n    val num = TSNEGradient.computeNumerator(Y, 0, 2)\n    println(num)\n  }\n}\n"
  },
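  {
    "path": "spark-tsne-core/src/test/scala/com/github/saurfang/spark/tsne/TSNEHelperSketch.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport org.apache.spark.SharedSparkContext\nimport org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry}\nimport org.scalatest.{FunSuite, Matchers}\n\n/**\n * Hypothetical test sketch, not part of the original repository: it exercises\n * TSNEHelper.computeP, which symmetrizes conditional affinities via\n * p_ij = (p_{i|j} + p_{j|i}) / 2n. The file name and toy values are illustrative\n * assumptions.\n */\nclass TSNEHelperSketch extends FunSuite with SharedSparkContext with Matchers {\n\n  test(\"computeP symmetrizes and normalizes conditional affinities\") {\n    val n = 2\n    // p_{1|0} = 0.4 and p_{0|1} = 0.6 in a 2 x 2 affinity matrix\n    val p_ji = new CoordinateMatrix(\n      sc.parallelize(Seq(MatrixEntry(0, 1, 0.4), MatrixEntry(1, 0, 0.6))), n, n)\n\n    val p = TSNEHelper.computeP(p_ji, n).collectAsMap()\n\n    // (0.4 + 0.6) / (2 * n) = 0.25 in both directions\n    p(0).toMap.apply(1) shouldBe 0.25 +- 1e-12\n    p(1).toMap.apply(0) shouldBe 0.25 +- 1e-12\n  }\n}\n"
  },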
  {
    "path": "spark-tsne-core/src/test/scala/com/github/saurfang/spark/tsne/X2PSuite.scala",
    "content": "package com.github.saurfang.spark.tsne\n\nimport org.apache.spark.SharedSparkContext\nimport org.apache.spark.mllib.linalg.Vectors\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.scalatest.{FunSuite, Matchers}\n\n/**\n * Created by forest on 8/16/15.\n */\nclass X2PSuite extends FunSuite with SharedSparkContext with Matchers {\n\n  test(\"Test X2P against tsne.jl implementation\") {\n    val input = new RowMatrix(\n      sc.parallelize(Seq(1 to 3, 4 to 6, 7 to 9, 10 to 12))\n        .map(x => Vectors.dense(x.map(_.toDouble).toArray))\n    )\n    val output = X2P(input, 1e-5, 2).toRowMatrix().rows.collect().map(_.toArray.toList)\n    println(output.toList)\n    //output shouldBe List(List(0, .5, .5), List(.5, 0, .5), List(.5, .5, .0))\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/test/scala/com/github/saurfang/spark/tsne/tree/SPTreeSpec.scala",
    "content": "package com.github.saurfang.spark.tsne.tree\n\nimport breeze.linalg._\nimport org.scalatest.{FunSpec, Matchers}\n\nclass SPTreeSpec extends FunSpec with Matchers {\n\n  describe(\"SPTree\") {\n    describe(\"with 2 dimensions (quadtree)\") {\n      val tree = new SPTree(2, DenseVector(0.0, 0.0), DenseVector(2.0, 4.0))\n      import tree._\n      it(\"should have 4 children\") {\n        children.length shouldBe 4\n      }\n      it(\"each child should have correct width\") {\n        val width = DenseVector(1.0, 2.0)\n        children.foreach(x => x.width shouldBe width)\n      }\n      it(\"children should have correct corner\") {\n        children.map(_.corner) shouldBe Array(\n          DenseVector(0.0, 0.0),\n          DenseVector(0.0, 2.0),\n          DenseVector(1.0, 0.0),\n          DenseVector(1.0, 2.0)\n        )\n      }\n      it(\"getCell should return correct cell\") {\n        getCell(DenseVector(1.0, 1.0)).corner shouldBe DenseVector(0.0, 0.0)\n        getCell(DenseVector(1.5, 1.5)).corner shouldBe DenseVector(1.0, 0.0)\n        getCell(DenseVector(2.0, 2.0)).corner shouldBe DenseVector(1.0, 0.0)\n        getCell(DenseVector(2.0, 2.5)).corner shouldBe DenseVector(1.0, 2.0)\n      }\n      it(\"should be able to be constructed from DenseMatrix\") {\n        val data = Array(\n          1.0, 1.0, 1.0, 2.0, 1.1, 1.11, 1.11, 1,\n          3.0, 1.0, 2.0, 2.0, 1.1, 1.11, 1.11, 1\n        )\n        val matrix = DenseMatrix.create[Double](data.length / 2, 2, data)\n        val tree = SPTree(matrix)\n\n        tree.getCount shouldBe matrix.rows\n        tree.children.map(_.getCount).sum shouldBe matrix.rows\n        tree.center shouldBe DenseVector(data.grouped(matrix.rows).map(x => x.sum / x.length).toArray)\n        verifyCorrectness(tree)\n      }\n    }\n  }\n\n  def verifyCorrectness(tree: SPTree): Unit = {\n    if(tree.getCount <= 1) tree.isLeaf shouldBe true\n    if(tree.getCount > 0) tree.center shouldBe (tree.totalMass / tree.getCount.toDouble)\n    if(tree.isLeaf) {\n      tree.children.foreach(_.isLeaf shouldBe true)\n      tree.children.foreach(_.getCount shouldBe 0)\n    } else {\n      tree.children.map(_.getCount).sum shouldBe tree.getCount\n      val totalMassTally = tree.children.foldLeft(DenseVector.zeros[Double](tree.dimension))((acc, t) => acc + t.totalMass)\n      (0 until tree.dimension).foreach(i => totalMassTally(i) shouldBe (tree.totalMass(i) +- 1e-5))\n      tree.children.foreach(verifyCorrectness)\n    }\n  }\n}\n"
  },
  {
    "path": "spark-tsne-core/src/test/scala/org/apache/spark/LocalSparkContext.scala",
    "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *    http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.spark\n\nimport _root_.io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory}\nimport org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}\n\n/** Manages a local `sc` {@link SparkContext} variable, correctly stopping it after each test. */\ntrait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>\n\n  @transient var sc: SparkContext = _\n\n  override def beforeAll() {\n    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())\n    super.beforeAll()\n  }\n\n  override def afterEach() {\n    resetSparkContext()\n    super.afterEach()\n  }\n\n  def resetSparkContext(): Unit = {\n    LocalSparkContext.stop(sc)\n    sc = null\n  }\n\n}\n\nobject LocalSparkContext {\n  def stop(sc: SparkContext) {\n    if (sc != null) {\n      sc.stop()\n    }\n    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown\n    System.clearProperty(\"spark.driver.port\")\n  }\n\n  /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */\n  def withSpark[T](sc: SparkContext)(f: SparkContext => T): T = {\n    try {\n      f(sc)\n    } finally {\n      stop(sc)\n    }\n  }\n\n}"
  },
  {
    "path": "spark-tsne-core/src/test/scala/org/apache/spark/SharedSparkContext.scala",
    "content": "package org.apache.spark\n\nimport org.scalatest.{BeforeAndAfterAll, Suite}\n\n/** Shares a local `SparkContext` between all tests in a suite and closes it at the end */\ntrait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>\n\n  @transient private var _sc: SparkContext = _\n\n  def sc: SparkContext = _sc\n\n  var conf = new SparkConf(false)\n\n  override def beforeAll() {\n    _sc = new SparkContext(\"local[4]\", \"test\", conf)\n    super.beforeAll()\n  }\n\n  override def afterAll() {\n    LocalSparkContext.stop(_sc)\n    _sc = null\n    super.afterAll()\n  }\n}\n"
  },
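  {
    "path": "spark-tsne-core/src/test/scala/org/apache/spark/mllib/X2PHelperSketch.scala",
    "content": "package org.apache.spark.mllib\n\nimport breeze.linalg.DenseVector\nimport org.apache.spark.mllib.X2PHelper._\nimport org.scalatest.{FunSuite, Matchers}\n\n/**\n * Hypothetical test sketch, not part of the original repository: it checks\n * X2PHelper.Hbeta on a trivial input where the expected entropy is known analytically.\n * The file name and toy values are illustrative assumptions.\n */\nclass X2PHelperSketch extends FunSuite with Matchers {\n  test(\"Hbeta of two equidistant points gives a uniform distribution with entropy log(2)\") {\n    // With equal distances D = (0, 0): P = exp(-D * beta) is uniform and\n    // H = log(sumP) + beta * sum(D :* P) / sumP = log(2).\n    val (h, p) = Hbeta(DenseVector(0.0, 0.0), beta = 1.0)\n    h shouldBe math.log(2.0) +- 1e-12\n    p(0) shouldBe 0.5 +- 1e-12\n    p(1) shouldBe 0.5 +- 1e-12\n  }\n}\n"
  },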
  {
    "path": "spark-tsne-examples/src/main/resources/log4j.properties",
    "content": "# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.spark-project.jetty=WARN\nlog4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO\nlog4j.logger.org.apache.spark=WARN\nlog4j.logger.org.apache.spark.mllib=INFO\n"
  },
  {
    "path": "spark-tsne-examples/src/main/scala/com/github/saurfang/spark/tsne/examples/MNIST.scala",
    "content": "package com.github.saurfang.spark.tsne.examples\n\n\nimport java.io.{BufferedWriter, OutputStreamWriter}\n\nimport com.github.saurfang.spark.tsne.impl._\nimport com.github.saurfang.spark.tsne.tree.SPTree\nimport org.apache.hadoop.fs.{FileSystem, Path}\nimport org.apache.spark.mllib.linalg.Vectors\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.apache.spark.{SparkConf, SparkContext}\nimport org.slf4j.LoggerFactory\n\nobject MNIST {\n  private def logger = LoggerFactory.getLogger(MNIST.getClass)\n\n  def main (args: Array[String]) {\n    val conf = new SparkConf()\n      .set(\"spark.serializer\", \"org.apache.spark.serializer.KryoSerializer\")\n      .registerKryoClasses(Array(classOf[SPTree]))\n    val sc = new SparkContext(conf)\n    val hadoopConf = sc.hadoopConfiguration\n    val fs = FileSystem.get(hadoopConf)\n\n    val dataset = sc.textFile(\"data/MNIST/mnist.csv.gz\")\n      .zipWithIndex()\n      .filter(_._2 < 6000)\n      .sortBy(_._2, true, 60)\n      .map(_._1)\n      .map(_.split(\",\"))\n      .map(x => (x.head.toInt, x.tail.map(_.toDouble)))\n      .cache()\n    //logInfo(dataset.collect.map(_._2.toList).toList.toString)\n\n    //val features = dataset.map(x => Vectors.dense(x._2))\n    //val scaler = new StandardScaler(true, true).fit(features)\n    //val scaledData = scaler.transform(features)\n    //  .map(v => Vectors.dense(v.toArray.map(x => if(x.isNaN || x.isInfinite) 0.0 else x)))\n    //  .cache()\n    val data = dataset.flatMap(_._2)\n    val mean = data.mean()\n    val std = data.stdev()\n    val scaledData = dataset.map(x => Vectors.dense(x._2.map(v => (v - mean) / std))).cache()\n\n    val labels = dataset.map(_._1).collect()\n    val matrix = new RowMatrix(scaledData)\n    val pcaMatrix = matrix.multiply(matrix.computePrincipalComponents(50))\n    pcaMatrix.rows.cache()\n\n    val costWriter = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(s\".tmp/MNIST/cost.txt\"), true)))\n\n    //SimpleTSNE.tsne(pcaMatrix, perplexity = 20, maxIterations = 200)\n    BHTSNE.tsne(pcaMatrix, maxIterations = 500, callback = {\n    //LBFGSTSNE.tsne(pcaMatrix, perplexity = 10, maxNumIterations = 500, numCorrections = 10, convergenceTol = 1e-8)\n      case (i, y, loss) =>\n        if(loss.isDefined) logger.info(s\"$i iteration finished with loss $loss\")\n\n        val os = fs.create(new Path(s\".tmp/MNIST/result${\"%05d\".format(i)}.csv\"), true)\n        val writer = new BufferedWriter(new OutputStreamWriter(os))\n        try {\n          (0 until y.rows).foreach {\n            row =>\n              writer.write(labels(row).toString)\n              writer.write(y(row, ::).inner.toArray.mkString(\",\", \",\", \"\\n\"))\n          }\n          if(loss.isDefined) costWriter.write(loss.get + \"\\n\")\n        } finally {\n          writer.close()\n        }\n    })\n    costWriter.close()\n\n    sc.stop()\n  }\n}\n"
  },
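  {
    "path": "spark-tsne-examples/src/main/scala/com/github/saurfang/spark/tsne/examples/SimpleUsageSketch.scala",
    "content": "package com.github.saurfang.spark.tsne.examples\n\nimport com.github.saurfang.spark.tsne.impl.SimpleTSNE\nimport org.apache.spark.mllib.linalg.Vectors\nimport org.apache.spark.mllib.linalg.distributed.RowMatrix\nimport org.apache.spark.{SparkConf, SparkContext}\n\n/**\n * Hypothetical minimal usage sketch, not part of the original repository: it shows\n * how SimpleTSNE.tsne can be called on a small, persisted RowMatrix. The toy data,\n * master URL and parameter values are illustrative assumptions.\n */\nobject SimpleUsageSketch {\n  def main(args: Array[String]): Unit = {\n    val sc = new SparkContext(new SparkConf().setAppName(\"SimpleUsageSketch\").setMaster(\"local[2]\"))\n\n    // Toy dataset: a handful of 3-dimensional points; cache it because tsne()\n    // warns when the input rows are not persisted.\n    val rows = sc.parallelize(Seq(\n      Vectors.dense(1.0, 2.0, 3.0),\n      Vectors.dense(4.0, 5.0, 6.0),\n      Vectors.dense(7.0, 8.0, 9.0),\n      Vectors.dense(10.0, 11.0, 12.0)\n    )).cache()\n\n    // Embed into 2 dimensions; the callback receives (iteration, current Y, optional loss).\n    val Y = SimpleTSNE.tsne(new RowMatrix(rows), noDims = 2, maxIterations = 50, perplexity = 2,\n      callback = { case (i, _, loss) => println(s\"iteration $i finished with loss $loss\") })\n\n    println(Y)\n    sc.stop()\n  }\n}\n"
  },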
  {
    "path": "spark-tsne-player/src/main/html/tsne.html",
    "content": "<!DOCTYPE html>\n<html>\n<meta charset='utf-8'>\n<title>t-SNE Viewer</title>\n<style>\n\n#chart {\n  margin-left: -40px;\n  height: 650px;\n}\n\ntext {\n  font: 10px sans-serif;\n}\n\n.axis path, .axis line {\n  fill: none;\n  stroke: #000;\n  shape-rendering: crispEdges;\n}\n\n.label {\n  fill: #777;\n}\n\n.iteration.label {\n  font: 500 196px 'Helvetica Neue';\n  fill: #ddd;\n}\n\n.iteration.label.active {\n  fill: #aaa;\n}\n\n.overlay {\n  fill: none;\n  pointer-events: all;\n  cursor: ew-resize;\n}\n\n</style>\n\n<h1>T-SNE Viewer</h1>\n\n<p id='chart'></p>\n\n<aside>Mouseover the iteration to move forward and backwards through time.</aside>\n\n<p class='attribution'>Source: <a href='http://bost.ocks.org/mike/nations'>The Wealth & Health of Nations</a>, <a href='http://bost.ocks.org/mike/'>Mike Bostock</a>.</p>\n\n<script src='https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.6/d3.min.js'></script>\n<script src='tsne-json.js'></script>\n<script>\n\n// Various accessors that specify the four dimensions of data to visualize.\nfunction x(d) { return d.x; }\nfunction y(d) { return d.y; }\nfunction text(d) { return d.label; }\nfunction color(d) { return d.label; }\nfunction key(d) { return d.key; }\n\n// Chart dimensions.\nvar margin = {top: 19.5, right: 19.5, bottom: 19.5, left: 99.5},\n    width = 640 - margin.right,\n    height = 640 - margin.top - margin.bottom;\n\n// Various scales. These domains make assumptions of data, naturally.\nvar xScale = d3.scale.linear().domain([-20, 20]).range([0, width]),\n    yScale = d3.scale.linear().domain([-20, 20]).range([height, 0]),\n    colorScale = d3.scale.category10();\n\n// The x & y axes.\nvar xAxis = d3.svg.axis().orient('bottom').scale(xScale).ticks(12, d3.format(',d')),\n    yAxis = d3.svg.axis().orient('left').scale(yScale).ticks(12, d3.format(',d'));\n\n// Create the SVG container and set the origin.\nvar svg = d3.select('#chart').append('svg')\n    .attr('width', width + margin.left + margin.right)\n    .attr('height', height + margin.top + margin.bottom)\n  .append('g')\n    .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n// Add the x-axis.\nsvg.append('g')\n    .attr('class', 'x axis')\n    .attr('transform', 'translate(0,' + height + ')')\n    .call(xAxis);\n\n// Add the y-axis.\nsvg.append('g')\n    .attr('class', 'y axis')\n    .call(yAxis);\n\n// Add an x-axis label.\nsvg.append('text')\n    .attr('class', 'x label')\n    .attr('text-anchor', 'end')\n    .attr('x', width)\n    .attr('y', height - 6);\n\n// Add a y-axis label.\nsvg.append('text')\n    .attr('class', 'y label')\n    .attr('text-anchor', 'end')\n    .attr('y', 6)\n    .attr('dy', '.75em')\n    .attr('transform', 'rotate(-90)');\n\n// Add the iteration label; the value is set on transition.\nvar label = svg.append('text')\n    .attr('class', 'iteration label')\n    .attr('text-anchor', 'end')\n    .attr('y', height - 24)\n    .attr('x', width)\n    .text('0000');\n\n// Load the data.\nd3.json('mnist.json?nocache=' + (new Date()).getTime(), function(results) {\n  var iterations = tsneData.iterations,\n      maxIteration = iterations.max(),\n      results = tsneData.data,\n      animationDuration = 100;\n\n  // Add a dot per observation. 
Initialize the data at 1, and set the colors.\n  var dot = svg.append('g')\n      .attr('class', 'dots')\n      .selectAll('.dot')\n      .data(interpolateData(1))\n      .enter().append('text')\n      .attr('class', 'dot')\n      .text(function(d) { return text(d); })\n      .style('fill', function(d) { return colorScale(color(d)); })\n      .call(position);\n\n  // Add an overlay for the iteration label.\n  var box = label.node().getBBox();\n\n  var overlay = svg.append('rect')\n        .attr('class', 'overlay')\n        .attr('x', box.x)\n        .attr('y', box.y)\n        .attr('width', box.width)\n        .attr('height', box.height)\n        .on('mouseover', enableInteraction);\n\n  // Start a transition that interpolates the data based on iteration.\n  svg.transition()\n      .duration(animationDuration * maxIteration)\n      .ease('linear')\n      .tween('iteration', tweenIteration)\n      .each('end', enableInteraction);\n\n  // Positions the dots based on data.\n  function position(dot) {\n    dot .attr('x', function(d) { return xScale(x(d)); })\n        .attr('y', function(d) { return yScale(y(d)); });\n  }\n\n  // After the transition finishes, you can mouseover to change the iteration.\n  function enableInteraction() {\n    var iterationScale = d3.scale.linear()\n        .domain([1, maxIteration])\n        .range([box.x + 10, box.x + box.width - 10])\n        .clamp(true);\n\n    // Cancel the current transition, if any.\n    svg.transition().duration(0);\n\n    overlay\n        .on('mouseover', mouseover)\n        .on('mouseout', mouseout)\n        .on('mousemove', mousemove)\n        .on('touchmove', mousemove);\n\n    function mouseover() {\n      label.classed('active', true);\n    }\n\n    function mouseout() {\n      label.classed('active', false);\n    }\n\n    function mousemove() {\n      displayIteration(iterationScale.invert(d3.mouse(this)[0]));\n    }\n  }\n\n  // Tweens the entire chart by first tweening the iteration, and then the data.\n  // For the interpolated data, the dots and label are redrawn.\n  function tweenIteration() {\n    var iteration = d3.interpolateNumber(1, maxIteration);\n    return function(t) { displayIteration(iteration(t)); };\n  }\n\n  // Updates the display to show the specified iteration.\n  function displayIteration(iteration) {\n  \tvar interpolated = interpolateData(iteration, iterations, results);\n  \t\n    var limits = interpolated.reduce(function(acc, data) {\n      return [Math.max(acc[0], Math.abs(data.x)), Math.max(acc[1], Math.abs(data.y))];\n    }, [0, 0]);\n    updateScale([-limits[0], limits[0]], [-limits[1], limits[1]]);\n\n    dot.data(interpolated, key).call(position);\n    label.text(pad(Math.round(iteration), 4));\n  }\n});\n\n</script>\n"
  }
]