[
  {
    "path": "LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright {yyyy} {name of copyright owner}\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# Basel Face Registration Pipeline\n\nThis repository contains all the code to reproduce our results from our recent publication:\n- Thomas Gerig, Andreas Morel-Forster, Clemens Blumer, Bernhard Egger, Marcel Lüthi, Sandro Schönborn and Thomas Vetter \n\"Morphable Face Models - An Open Framework\" ( [edoc-unibas](https://edoc.unibas.ch/69084/) / [IEEE](https://ieeexplore.ieee.org/document/8373814)  )\nIN: 13th IEEE Conference on Automatic Face and Gesture Recognition (FG 2018) pp. 75-82\n\n## Overview\n\nAfter the following information, we list all nescessary steps that you need to take to aquire and prepare the data as well as to run the pipeline and the experiments in the next sections.\n\n### Problems under Windows\n\nWhen you expericence some problems under Windows while importing the data, please use the branch **updateScalismoFaces**.\n\n### Reporting problems and discussion\n\nWhen you experience problems, you have questions or feedback please use the mailing list for [Morphable face models - an open framework](https://groups.google.com/forum/#!categories/scalismo-faces/morphable-face-models---an-open-framework).\n\n## Preparation\n\n### Step 1: Folder structure and Basel reference mesh\n\nFor the registration pipeline and the experiments to work properly, some data, such as reference templates and landmarks are needed. The files are available\nfor download at [Registration Pipeline Data](https://faces.dmi.unibas.ch/bfm/bfm2017.html). The download contains the following in detail:\n\n* Manually clicked landmarks for the BU3D-FE database.\n* BFM reference mesh and expression means.\n* Landmarks of the reference mesh.\n* Region mask for model-building.\n\nYou can copy the content of the zip folder into `pipeline-data`. 
The coarse structure looks as follows:\n\n```\npipeline-data\n├── data\n│   ├── incoming\n│   ├── bu3dfe\n│   │   ├── original\n├── recognition-experiment\n```\n\nIf needed, you can change the location of the `pipeline-data` directory in the BU3DDataProvider.scala file.\n\n### Step 2: Bu3DFE Database\n\nTo register the BU-3DFE you have to acquire the dataset here:\n\n[BU-3DFE](http://www.cs.binghamton.edu/~lijun/Research/3DFE/3DFE_Analysis.html)\n\nand copy the `/original` folder to `data/bu3dfe/original/`.\n\n### Step 3: Sbt (Scala build tool)\n\nWe assume that you have sbt already installed. If not, please follow the instructions given\n[here](http://www.scala-sbt.org/release/tutorial/Setup.html).\n\nGenerally you can run the code using SBT. For example, you can run it in the terminal with:\n\n```\ncd /code-directory/\nsbt run\n```\n\nIf you do not have enough memory use:\n```\nsbt -J-Xmx50g run\n```\n\nThe different steps are then listed and can be executed by entering the number of the script or by using:\n```\nsbt \"run-main package.Classname\"\n```\n\n\n## Running the Pipeline\n\n### Step 0: Data pre-processing & Folder Structure Creation\n\nDuring the pipeline we do not use the BU3DFE database data directly but first convert the data to match our formats.\nThis step is done only once as a pre-processing and the output can be reused whenever you run a new registration.\n\nTo convert the original data from the BU3DFE database to our format use the command:\n\n```\nsbt \"run-main preprocessing.ConvertBu3DRawData\"\n```\n\nExplain raw data preprocessing steps in script. 
(The script might need some cleanup.)\n\n### Step 1: Building the Neutral Prior Model\n\nPre-computing the neutral prior model can take quite some time.\nHowever, it has to be computed only once offline and is stored in `pipeline-data/data/incoming/reference/gpmodels/`.\n\nYou can run the building process with:\n\n```\nsbt \"run-main registration.BuildNeutralPrior\"\n```\n\n### Step 2: Building the Core Expression Model\n\nThe core expression model augments the neutral model with expression deformations.\n\n```\nsbt \"run-main registration.BuildCoreExpressionModel\"\n```\n\n### Step 3: Preprocess Landmarks\n\nThis step is used to transform the reference landmarks to the new mean of the generated models and to change the uncertainty\nof the individual landmarks.\n\n```\nsbt \"run-main preprocessing.PrepareReferenceLandmarks\"\n```\n\n### Step 4: Registration\n\n```\nsbt -J-Xmx40g \"run-main registration.Registration\"\n```\n\n### Step 5: Building the Morphable Model\n\nThe model building contains two steps:\n\n - First for each registration result the color is extracted using the input mesh.\n - Based on all meshes with color a model containing shape, color and expression variations is built.\n\nThis process may need some time and memory. Once the first step, the color extraction, is computed it\ncan be reused if you change for example the mask of the model that should be built. But to change this\nyou have to comment out the corresponding line in the source code.\n\n```\nsbt -mem 40000 \"run-main modelbuilding.ModelBuilding\"\n```\n## Face Reconstruction from 2D Image\n\nFirst you have to download the Multi-PIE database and copy the necessary files to the correct folders. \nThis is described in the README file in the folder recognition-experiment (comes with separate download of the Basel Face Pipeline [Data](https://faces.dmi.unibas.ch/bfm/bfm2017.html)). 
\nFor those experiments you need the Basel Face Model 2009 and 2017, which can be downloaded at:\n[Probabilistic Morphable Models](https://gravis.dmi.unibas.ch/PMM/)\n\nTo run the 3D reconstructions from the Multi-PIE database, you may want to execute it multiple times in parallel\nsince a single fit takes ~20 minutes:\n```\nsbt -mem 5000 \"fitting.experiments.RecognitionMultiPiePose\"\n```\nAnd to calculate the recognition scores execute:\n```\nsbt -mem 5000 \"fitting.experiments.RecognitionEvaluation\"\n```\nThose were the neutral scores. To perform the expression experiments, run:\n```\nsbt -mem 5000 \"fitting.experiments.RecognitionMultiPieExpression\"\nsbt -mem 5000 \"fitting.experiments.RecognitionEvaluationEx\"\n```\n\n\n\n"
  },
  {
    "path": "build.sbt",
    "content": "name := \"basel-face-pipeline\"\n\nversion := \"0.1\"\n\nscalaVersion := \"2.11.8\"\n\nscalacOptions := Seq(\"-unchecked\", \"-deprecation\", \"-encoding\", \"utf8\")\n\nresolvers += Resolver.jcenterRepo\n\nresolvers += Resolver.bintrayRepo(\"unibas-gravis\", \"maven\")\n\nlibraryDependencies += \"ch.unibas.cs.gravis\" %% \"scalismo-faces\" % \"0.5.0\"\n\nlibraryDependencies += \"ch.unibas.cs.gravis\" %% \"scalismo-ui\" % \"0.11.+\"\n\nlibraryDependencies += \"com.github.tototoshi\" %% \"scala-csv\" % \"1.3.3\"\n\nlibraryDependencies += \"com.typesafe.scala-logging\" %% \"scala-logging\" % \"3.5.0\"\n\nlibraryDependencies += \"ch.qos.logback\" % \"logback-classic\" % \"1.1.7\"\n\nlibraryDependencies ~= { _.map(_.exclude(\"org.slf4j\", \"slf4j-nop\")) }\n\n"
  },
  {
    "path": "pipeline-data/.gitignore",
    "content": ""
  },
  {
    "path": "src/main/resources/logback.xml",
    "content": "<!--\n  ~ Copyright University of Basel, Graphics and Vision Research Group\n  ~\n  ~ Licensed under the Apache License, Version 2.0 (the \"License\");\n  ~ you may not use this file except in compliance with the License.\n  ~ You may obtain a copy of the License at\n  ~\n  ~     http://www.apache.org/licenses/LICENSE-2.0\n  ~\n  ~ Unless required by applicable law or agreed to in writing, software\n  ~ distributed under the License is distributed on an \"AS IS\" BASIS,\n  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  ~ See the License for the specific language governing permissions and\n  ~ limitations under the License.\n  -->\n<configuration>\n    <appender name=\"STDOUT\" class=\"ch.qos.logback.core.ConsoleAppender\">\n        <encoder>\n            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>\n        </encoder>\n    </appender>\n\n    <appender name=\"FILE\" class=\"ch.qos.logback.core.FileAppender\">\n        <!-- path to your log file, where you want to store logs -->\n        <file>/tmp/face-registration.log</file>\n        <append>false</append>\n        <encoder>\n            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>\n        </encoder>\n    </appender>\n\n    <logger name=\"registration.Registration\" level=\"DEBUG\" />\n    <logger name=\"breeze.optimize\" level=\"ERROR\" />\n\n    <root level=\"debug\">\n        <appender-ref ref=\"STDOUT\" />\n        <appender-ref ref=\"FILE\" />\n    </root>\n</configuration>"
  },
  {
    "path": "src/main/scala/ch/unibas/cs/gravis/facepipeline/BU3DDataProvider.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage ch.unibas.cs.gravis.facepipeline\n\nimport java.io.File\n\nimport registration.modelbuilding.FaceMask\nimport scalismo.faces.io.{MoMoIO, TLMSLandmarksIO}\nimport scalismo.faces.landmarks.TLMSLandmark3D\nimport scalismo.faces.mesh.{BinaryMask, ColorNormalMesh3D}\nimport scalismo.faces.momo.MoMo\nimport scalismo.geometry.{Landmark, _3D}\nimport scalismo.io.{LandmarkIO, MeshIO, StatismoIO}\nimport scalismo.mesh.TriangleMesh\nimport scalismo.statisticalmodel.StatisticalMeshModel\n\nimport scala.io.Source\nimport scala.reflect.io.Path\nimport scala.util.{Failure, Success, Try}\n\nobject BU3DDataProvider extends DataProvider {\n\n  override case object Neutral extends ExpressionType { override def toString: String = \"_NE00\" }\n  case object Sadness extends ExpressionType { override def toString: String = \"_SA04\" }\n  case object Surprise extends ExpressionType { override def toString: String = \"_SU04\" }\n  case object Disgust extends ExpressionType { override def toString: String = \"_DI04\" }\n  case object Fear extends ExpressionType { override def toString: String = \"_FE04\" }\n  case object Joy extends ExpressionType { override def toString: String = \"_HA04\" }\n  case object Anger extends ExpressionType { override def toString: String = \"_AN04\" }\n  case object CoreExpression extends 
ExpressionType { override def toString: String = \"_ALLEXP\" }\n\n  object Expressions {\n    def expressionList(): Seq[ExpressionType] = Seq(Neutral, Sadness, Surprise, Disgust, Fear, Joy, Anger)\n    def expressionModelTypes(): Seq[ExpressionType] = Seq(Neutral,CoreExpression)\n  }\n  override def expressions() = Expressions.expressionList()\n\n\n\n  case object RAW extends MaskType { override def toString: String = \"_RAW\" }\n  case object F3D extends MaskType { override def toString: String = \"_F3D\" }\n\n  object Masks {\n    def maskList(): Seq[MaskType] = Seq(RAW, F3D)\n  }\n\n  override def masks: Seq[MaskType] = Masks.maskList()\n\n\n\n  case class BU3DID(override val id: String, override val raceTag: String) extends Person\n  object BU3DID {\n    def fromFilename(filename: String): BU3DID = {\n      BU3DID(filename.substring(0, 5), filename.substring(10, 12))\n    }\n  }\n\n  override def personFromFilename(filename: String): Person = BU3DID.fromFilename(filename)\n\n\n\n\n  case object Basel extends DataFlag { override def toString: String = \"_basel\" }\n  case object Original extends DataFlag { override def toString: String = \"\" }\n  case object Aligned extends DataFlag { override def toString: String = \"_aligned\" }\n\n  object Flags {\n    def lmFlagList(): Seq[DataFlag] = Seq(Basel, Original, Aligned)\n  }\n\n\n\n  private def setFileAccessMode(filename: String): Unit = setFileAccessMode(new File(filename))\n  private def setFileAccessMode(path: Path): Unit = setFileAccessMode(path.jfile)\n  private def setFileAccessMode(file: File): Unit = {\n    file.setReadable(true,false)\n    file.setWritable(true,false)\n  }\n\n  override def repositoryRoot: Path = Path(\"pipeline-data/\")\n\n  override def incoming: BU3DDataProvider.Incoming = {\n\n    new BU3DDataProvider.Incoming {\n\n      val incomingPath = repositoryRoot / \"data\" / \"incoming\"\n      incomingPath.jfile.mkdirs()\n\n      override def reference: BU3DDataProvider.Reference = new 
BU3DDataProvider.Reference {\n\n        val referencePath = incomingPath / \"reference\"\n        referencePath.jfile.mkdirs()\n\n        override def loadFaceMask(): Try[FaceMask] = {\n\n          val maskPath = referencePath / \"masks\"\n          maskPath.jfile.mkdirs()\n\n          for {\n            level_mask <- MeshIO.readScalarMeshField[Int](new File(maskPath.jfile, \"level-mask-l7.vtk\"))\n            semantic_mask <- MeshIO.readScalarMeshField[Short](new File(maskPath.jfile, \"semantic-mask-l7.vtk\")).map(_.map(_.toInt))\n          } yield {\n            FaceMask(level_mask,semantic_mask)\n          }\n\n        }\n\n        override def loadMesh(expression: ExpressionType): Try[TriangleMesh[_3D]] = {\n          import scalismo.faces.io.MeshIO\n          val mshPath = referencePath / \"mesh\"\n          mshPath.jfile.mkdirs()\n          expression match {\n            case Neutral => MeshIO.read(new File(mshPath.jfile, \"mean2012_l7_bfm_nomouth.ply\"))\n              .map(_.shape)\n            case Sadness => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-sadness.ply\"))\n              .map(_.shape)\n            case Surprise => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-surprise.ply\"))\n              .map(_.shape)\n            case Disgust => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-disgust.ply\"))\n              .map(_.shape)\n            case Fear => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-fear.ply\"))\n              .map(_.shape)\n            case Joy => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-joy.ply\"))\n              .map(_.shape)\n            case Anger => MeshIO.read(new File(mshPath.jfile, \"mean2015.1_l7_bfm_nomouth-anger.ply\"))\n              .map(_.shape)\n          }\n        }\n\n        override def loadLandmarks(expression: ExpressionType): Try[Seq[Landmark[_3D]]] = {\n          val lmPath = referencePath / \"landmarks\"\n        
  lmPath.jfile.mkdirs()\n            LandmarkIO.readLandmarksJson[_3D](new File(lmPath.jfile, s\"reference${expression.toString}.json\"))\n        }\n\n        override def saveLandmarks(expression: ExpressionType, landmarks: Seq[Landmark[_3D]]): Try[Unit] = {\n          val lmPath = referencePath / \"landmarks\"\n          lmPath.jfile.mkdirs()\n          val res = LandmarkIO.writeLandmarksJson[_3D](landmarks.toIndexedSeq, new File(lmPath.jfile, s\"reference${expression.toString}.json\"))\n          res match {\n              case Success(_) => setFileAccessMode(lmPath)\n              case _ =>\n            }\n          res\n        }\n\n\n        override def loadLineLandmarks(expression: ExpressionType): Try[Seq[Landmark[_3D]]] = ???\n      }\n\n      def landmarksPath(id: Person, expression: ExpressionType, mask: MaskType = RAW, flag: DataFlag = Basel): Path = {\n        incomingPath / \"landmarks\" / s\"${id.id}$expression${id.raceTag}${mask}$flag.tlms\"\n      }\n\n      override def loadLandmarks(id: Person, expression: ExpressionType): Try[Seq[Landmark[_3D]]] = loadLandmarks(id, expression, RAW, Basel)\n      override def loadLandmarks(id: Person, expression: ExpressionType, mask: MaskType): Try[Seq[Landmark[_3D]]] = loadLandmarks(id, expression, mask, Basel)\n      override def loadLandmarks(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[Seq[Landmark[_3D]]] = {\n        val path = landmarksPath(id, expression, mask, flag)\n        TLMSLandmarksIO.read3D(path.jfile) match {\n          case Success(tlmsLandmarks) => Success(tlmsLandmarks.filter(_.visible).map(_.toLandmark))\n          case Failure(t) => Failure(t)\n        }\n      }\n\n      override def saveLandmarks(id: Person, expression: ExpressionType, landmarks: Seq[Landmark[_3D]]): Try[Unit] = saveLandmarks(id, expression, RAW, Basel, landmarks)\n      override def saveLandmarks(id: Person, expression: ExpressionType, mask: MaskType, landmarks: Seq[Landmark[_3D]]): 
Try[Unit] = saveLandmarks(id, expression, mask, Basel, landmarks)\n      override def saveLandmarks(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, landmarks: Seq[Landmark[_3D]]): Try[Unit] = {\n        val path = landmarksPath(id, expression, mask, flag)\n        path.jfile.getParentFile.mkdirs()\n        val tlms = landmarks.map { lm =>\n          TLMSLandmark3D(lm.id, lm.point, visible = true)\n        }.toIndexedSeq\n        val res = TLMSLandmarksIO.write3D(tlms, path.jfile)\n        res match {\n          case Success(_) => setFileAccessMode(path)\n          case _ =>\n        }\n        res\n      }\n\n      def meshPath(id: Person, expression: ExpressionType, mask: MaskType = RAW, flag: DataFlag = Original): Path = {\n        incomingPath / \"mesh\" / s\"${id.id}${expression}${id.raceTag}${mask}$flag.ply\"\n      }\n\n      override def loadMesh(id: Person, expression: ExpressionType) = loadMesh(id, expression, RAW, Original)\n      override def loadMesh(id: Person, expression: ExpressionType, mask: MaskType) = loadMesh(id, expression, mask, Original)\n      override def loadMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[TriangleMesh[_3D]] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression, mask, flag)\n        MeshIO.read(path.jfile).map(_.shape)\n      }\n\n      override def loadColoredMesh(id: Person, expression: ExpressionType): Try[ColorNormalMesh3D] = loadColoredMesh(id, expression, RAW, Original)\n      override def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType): Try[ColorNormalMesh3D] = loadColoredMesh(id, expression, mask, Original)\n      override def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[ColorNormalMesh3D] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression, mask, flag)\n        MeshIO.read(path.jfile).map(ocnm => 
ColorNormalMesh3D(ocnm.shape,ocnm.color.get,ocnm.normals.get))\n      }\n\n      override def saveMesh(id: Person, expression: ExpressionType, mesh: TriangleMesh[_3D]): Try[Unit] = saveMesh(id, expression, RAW, Original, mesh)\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, mesh: TriangleMesh[_3D]): Try[Unit] = saveMesh(id, expression, mask, Original, mesh)\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: TriangleMesh[_3D]): Try[Unit] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression, mask, flag)\n        path.jfile.getParentFile.mkdirs()\n        MeshIO.write(mesh, None, None, path.jfile)\n        setFileAccessMode(path)\n        Success(Unit)\n      }\n\n      override def saveMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D): Try[Unit] = saveMesh(id, expression, RAW, Original, mesh)\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, mesh: ColorNormalMesh3D): Try[Unit] = saveMesh(id, expression, mask, Original, mesh)\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: ColorNormalMesh3D): Try[Unit] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression, mask, flag)\n        path.jfile.getParentFile.mkdirs()\n        MeshIO.write(mesh, path.jfile)\n        setFileAccessMode(path)\n        Success(Unit)\n      }\n\n      override def ids(expression: ExpressionType): Seq[Person] = {\n        new File(incomingPath.jfile, \"mesh\").listFiles()\n          .filter(_.getName.endsWith(\".ply\"))\n          .filter(_.getName.contains(RAW.toString))\n          .filter(_.getName.contains(expression.toString))\n          .map(file => BU3DID.fromFilename(file.getName))\n          .toSeq\n      }\n    }\n  }\n\n  override def registration: BU3DDataProvider.SurfaceRegistration = {\n\n    new 
BU3DDataProvider.SurfaceRegistration {\n      val registrationPath = repositoryRoot / \"data\" / \"registered\"\n      registrationPath.jfile.mkdirs()\n\n      val referencePath = registrationPath / \"reference\"\n      referencePath.jfile.mkdirs()\n\n      val modelPath = referencePath / \"gpmodels\"\n      modelPath.jfile.mkdirs()\n\n      override def loadPriorModel(expression: ExpressionType): Try[StatisticalMeshModel] = {\n        expression match {\n          case Neutral => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-neutral.h5\"))\n          case Sadness => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-sadness.h5\"))\n          case Surprise => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-surprise.h5\"))\n          case Disgust => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-disgust.h5\"))\n          case Fear => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-fear.h5\"))\n          case Joy => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-joy.h5\"))\n          case Anger => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-anger.h5\"))\n          case CoreExpression => StatismoIO.readStatismoMeshModel(new File(modelPath.jfile, \"face-model-combined-expressions.h5\"))\n        }\n      }\n\n      override def savePriorModel(model: StatisticalMeshModel, expression: ExpressionType): Try[Unit] = {\n        expression match {\n          case Neutral => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-neutral.h5\"))\n          case Sadness => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-sadness.h5\"))\n          case Surprise => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-surprise.h5\"))\n          case Disgust => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, 
\"face-model-disgust.h5\"))\n          case Fear => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-fear.h5\"))\n          case Joy => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-joy.h5\"))\n          case Anger => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-anger.h5\"))\n          case CoreExpression => StatismoIO.writeStatismoMeshModel(model, new File(modelPath.jfile, \"face-model-combined-expressions.h5\"))\n        }\n\n      }\n\n      def meshPath(id: Person, expression: ExpressionType, mask: MaskType = RAW, flag: DataFlag = Original): Path = {\n        registrationPath / \"mesh\" / s\"${id.id}${expression}${id.raceTag}${mask}${flag}.ply\"\n      }\n\n      override def loadMesh(id: Person, expression: ExpressionType): Try[TriangleMesh[_3D]] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression)\n        MeshIO.read(path.jfile).map(_.shape)\n      }\n\n      override def loadMesh(id: Person, expression: ExpressionType, mask: MaskType): Try[TriangleMesh[_3D]] = ???\n      override def loadMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[TriangleMesh[_3D]] = ???\n      override def loadColoredMesh(id: Person, expression: ExpressionType): Try[ColorNormalMesh3D] = ???\n      override def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType): Try[ColorNormalMesh3D] = ???\n      override def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[ColorNormalMesh3D] = ???\n\n      override def saveMesh(id: Person, expression: ExpressionType, mesh: TriangleMesh[_3D]): Try[Unit] = {\n        import scalismo.faces.io.MeshIO\n        val path = meshPath(id, expression)\n        MeshIO.write(mesh, None, None, path.jfile)\n        setFileAccessMode(path)\n        Success(Unit)\n      }\n\n      override def saveMesh(id: Person, expression: 
ExpressionType, mask: MaskType, mesh: TriangleMesh[_3D]): Try[Unit] = ???\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: TriangleMesh[_3D]): Try[Unit] = ???\n\n      override def saveMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D): Try[Unit] = ???\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, mesh: ColorNormalMesh3D): Try[Unit] = ???\n      override def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: ColorNormalMesh3D): Try[Unit] = ???\n\n      override def ids(expression: ExpressionType): Seq[Person] = {\n        new File(registrationPath.jfile, \"mesh\").listFiles()\n          .filter(_.getName.endsWith(\"ply\"))\n          .filter(_.getName.contains(expression.toString))\n          .map(file => BU3DID.fromFilename(file.getName))\n          .toSeq\n      }\n\n    }\n  }\n\n  override def model : BU3DDataProvider.ModelBuilding = BU3DModelBuilding\n\n  object BU3DModelBuilding extends BU3DDataProvider.ModelBuilding {\n    val modelBuildingPath = repositoryRoot / \"data\" / \"modelbuilding\"\n    modelBuildingPath.jfile.mkdirs()\n\n    val modelDirectoryPath = modelBuildingPath / \"model\"\n    modelDirectoryPath.jfile.mkdirs()\n\n    val colorExtractdMeshPath = modelBuildingPath / \"mesh\"\n    colorExtractdMeshPath.jfile.mkdirs()\n\n    def meshPath(id: Person, expression: ExpressionType, mask: MaskType = RAW, flag: DataFlag = Original): Path = {\n      colorExtractdMeshPath / s\"${id.id}${expression}${id.raceTag}${mask}$flag.ply\"\n    }\n\n    override def loadColoredMesh(id: Person, expression: ExpressionType): Try[ColorNormalMesh3D] = {\n      loadColoredMesh(id,expression,RAW,Original)\n    }\n\n    def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType = RAW, flag: DataFlag = Original): Try[ColorNormalMesh3D] = {\n      import scalismo.faces.io.MeshIO\n      val path = meshPath(id, 
expression, mask, flag)\n      MeshIO.read(path.jfile).map(_.colorNormalMesh3D.get)\n    }\n\n    override def saveColoredMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D): Try[Unit] = {\n      saveColoredMesh(id,expression,mesh,RAW,Original)\n    }\n\n    def saveColoredMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D, mask: MaskType = RAW, flag: DataFlag = Original): Try[Unit] = {\n      import scalismo.faces.io.MeshIO\n      val path = meshPath(id, expression, mask, flag)\n      path.jfile.getParentFile.mkdirs\n      MeshIO.write(mesh, path.jfile)\n      setFileAccessMode(path)\n      Success(Unit)\n    }\n\n    def modelPath(mask: MaskType): Path = {\n      modelDirectoryPath / s\"bu3d_pami17${mask}.h5\" // @todo think about registration identifier in name\n    }\n\n    override def saveModel(mask: MaskType, momo: MoMo): Try[Unit]  = {\n      val path = modelPath(mask)\n      path.jfile.getParentFile.mkdirs\n      val res = MoMoIO.write(momo, path.jfile, \"\")\n      res match {\n        case Success(_) => setFileAccessMode(path)\n        case _ =>\n      }\n      res\n    }\n\n    override def loadModel(mask: MaskType): Try[MoMo] = {\n      MoMoIO.read(modelPath(mask).jfile, \"\")\n    }\n\n  }\n\n  override def fitting: BU3DDataProvider.Fitting = {\n    new BU3DDataProvider.Fitting {\n\n    }\n  }\n\n\n\n\n\n  override def loadMeshMask(from: String, to: String): Try[BinaryMask] = {\n    BinaryMask.load(Source.fromFile(new File(repositoryRoot / \"data\" / \"incoming\" / \"reference\" / \"masks\" / from+\"_TO_\"+to+\".mask\")))\n  }\n\n\n}\n"
  },
  {
    "path": "src/main/scala/ch/unibas/cs/gravis/facepipeline/DataProvider.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage ch.unibas.cs.gravis.facepipeline\n\nimport registration.modelbuilding.FaceMask\nimport scalismo.faces.mesh.{BinaryMask, ColorNormalMesh3D}\nimport scalismo.faces.momo.MoMo\nimport scalismo.geometry.{Landmark, _3D}\nimport scalismo.mesh.TriangleMesh\nimport scalismo.statisticalmodel.StatisticalMeshModel\n\nimport scala.reflect.io.Path\nimport scala.util.Try\n\ntrait ExpressionType {\n  override def toString : String\n}\n\ntrait MaskType {\n  override def toString: String\n}\n\ntrait DataFlag {\n  override def toString: String\n}\n\ntrait DataProvider {\n\n  trait Person {\n    def id: String\n    def raceTag: String\n  }\n\n  trait WithMesh {\n    def loadMesh(id: Person, expression: ExpressionType): Try[TriangleMesh[_3D]]\n    def loadMesh(id: Person, expression: ExpressionType, mask: MaskType): Try[TriangleMesh[_3D]]\n    def loadMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[TriangleMesh[_3D]]\n    def loadColoredMesh(id: Person, expression: ExpressionType): Try[ColorNormalMesh3D]\n    def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType): Try[ColorNormalMesh3D]\n    def loadColoredMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[ColorNormalMesh3D]\n    def saveMesh(id: Person, expression: ExpressionType, 
mesh: TriangleMesh[_3D]): Try[Unit]\n    def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, mesh: TriangleMesh[_3D]): Try[Unit]\n    def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: TriangleMesh[_3D]): Try[Unit]\n    def saveMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D): Try[Unit]\n    def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, mesh: ColorNormalMesh3D): Try[Unit]\n    def saveMesh(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, mesh: ColorNormalMesh3D): Try[Unit]\n  }\n\n  trait WithLandmarks {\n    def loadLandmarks(id: Person, expression: ExpressionType): Try[Seq[Landmark[_3D]]]\n    def loadLandmarks(id: Person, expression: ExpressionType, mask: MaskType): Try[Seq[Landmark[_3D]]]\n    def loadLandmarks(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag): Try[Seq[Landmark[_3D]]]\n    def saveLandmarks(id: Person, expression: ExpressionType, landmarks: Seq[Landmark[_3D]]): Try[Unit]\n    def saveLandmarks(id: Person, expression: ExpressionType, mask: MaskType, landmarks: Seq[Landmark[_3D]]): Try[Unit]\n    def saveLandmarks(id: Person, expression: ExpressionType, mask: MaskType, flag: DataFlag, landmarks: Seq[Landmark[_3D]]): Try[Unit]\n  }\n\n  trait WithLineLandmarks {\n    def loadLineLandmarks(id: Person, expression: ExpressionType): Try[Seq[Landmark[_3D]]]\n    def saveLineLandmarks(id: Person, expression: ExpressionType): Try[Seq[Landmark[_3D]]]\n  }\n\n  trait WithIds {\n    def ids(expression: ExpressionType): Seq[Person]\n  }\n\n  trait Reference {\n    def loadMesh(expression: ExpressionType): Try[TriangleMesh[_3D]]\n    def loadFaceMask(): Try[FaceMask]\n    def loadLandmarks(expression: ExpressionType): Try[Seq[Landmark[_3D]]]\n    def saveLandmarks(expression: ExpressionType, landmarks : Seq[Landmark[_3D]]): Try[Unit]\n    def loadLineLandmarks(expression: ExpressionType): Try[Seq[Landmark[_3D]]]\n  
}\n\n  trait Incoming extends WithIds with WithMesh with WithLandmarks {\n\n    def reference: Reference\n  }\n\n  trait SurfaceRegistration extends WithIds with WithMesh {\n    def loadPriorModel(expression: ExpressionType): Try[StatisticalMeshModel]\n    def savePriorModel(model: StatisticalMeshModel, expressionType: ExpressionType): Try[Unit]\n  }\n\n  trait ModelBuilding {\n    def loadModel( mask: MaskType ) : Try[MoMo]\n    def saveModel( mask: MaskType, momo: MoMo ) : Try[Unit]\n    def loadColoredMesh(id: Person, expression: ExpressionType): Try[ColorNormalMesh3D]\n    def saveColoredMesh(id: Person, expression: ExpressionType, mesh: ColorNormalMesh3D): Try[Unit]\n  }\n\n  trait Fitting {}\n\n  def Neutral: ExpressionType\n\n  def repositoryRoot: Path\n\n  def incoming: Incoming\n\n  def registration: SurfaceRegistration\n\n  def model: ModelBuilding\n\n  def fitting: Fitting\n\n  def expressions: Seq[ExpressionType]\n\n  def masks: Seq[MaskType]\n\n  def loadMeshMask(from: String, to: String): Try[BinaryMask]\n\n  def personFromFilename(filename: String): Person\n}\n\n"
  },
  {
    "path": "src/main/scala/ch/unibas/cs/gravis/facepipeline/PipelineStep.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage ch.unibas.cs.gravis.facepipeline\n\ntrait PipelineStep {\n\n  def run() : Unit\n}\n"
  },
  {
    "path": "src/main/scala/fitting/StandardFitScript.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage fitting\n\nimport java.io.File\n\nimport scalismo.faces.color.{RGB, RGBA}\nimport scalismo.faces.deluminate.SphericalHarmonicsOptimizer\nimport scalismo.faces.image.PixelImage\nimport scalismo.faces.io.{PixelImageIO, RenderParameterIO, TLMSLandmarksIO}\nimport scalismo.faces.mesh.MeshSurfaceSampling\nimport scalismo.faces.parameters.RenderParameter\nimport scalismo.faces.sampling.face.evaluators.PixelEvaluators._\nimport scalismo.faces.sampling.face.evaluators.PointEvaluators.IsotropicGaussianPointEvaluator\nimport scalismo.faces.sampling.face.evaluators.PriorEvaluators.{GaussianShapePrior, GaussianTexturePrior}\nimport scalismo.faces.sampling.face.evaluators._\nimport scalismo.faces.sampling.face.loggers._\nimport scalismo.faces.sampling.face.proposals.ImageCenteredProposal.implicits._\nimport scalismo.faces.sampling.face.proposals.ParameterProposals.implicits._\nimport scalismo.faces.sampling.face.proposals.SphericalHarmonicsLightProposals._\nimport scalismo.faces.sampling.face.proposals._\nimport scalismo.faces.sampling.face.{MoMoRenderer, ParametricLandmarksRenderer, ParametricModel}\nimport scalismo.geometry.{Vector, Vector3D, _2D}\nimport scalismo.sampling.algorithms.MetropolisHastings\nimport scalismo.sampling.evaluators.ProductEvaluator\nimport 
scalismo.sampling.loggers.ChainStateLogger.implicits._\nimport scalismo.sampling.loggers.ChainStateLoggerContainer.implicits._\nimport scalismo.sampling.loggers.{BestSampleLogger}\nimport scalismo.sampling.proposals.MixtureProposal.implicits._\nimport scalismo.sampling.proposals.{MetropolisFilterProposal, MixtureProposal}\nimport scalismo.sampling.{ProposalGenerator, TransitionProbability}\nimport scalismo.utils.Random\n\n/* This Fitscript with its evaluators and the proposal distribution follows closely the proposed setting of:\n\nMarkov Chain Monte Carlo for Automated Face Image Analysis\nSandro Sch�nborn, Bernhard Egger, Andreas Morel-Forster and Thomas Vetter\nInternational Journal of Computer Vision 123(2), 160-183 , June 2017\nDOI: http://dx.doi.org/10.1007/s11263-016-0967-5\n\nTo understand the concepts behind the fitscript and the underlying methods there is a tutorial on:\nhttp://gravis.dmi.unibas.ch/pmm/\n\n */\n\nobject StandardFitScript {\n\n  /* Collection of all pose related proposals */\n  def defaultPoseProposal(lmRenderer: ParametricLandmarksRenderer)(implicit rnd: Random):\n  ProposalGenerator[RenderParameter] with TransitionProbability[RenderParameter] = {\n    import MixtureProposal.implicits._\n\n    val yawProposalC = GaussianRotationProposal(Vector3D.unitY, 0.75f)\n    val yawProposalI = GaussianRotationProposal(Vector3D.unitY, 0.10f)\n    val yawProposalF = GaussianRotationProposal(Vector3D.unitY, 0.01f)\n    val rotationYaw = MixtureProposal(0.1 *: yawProposalC + 0.4 *: yawProposalI + 0.5 *: yawProposalF)\n\n    val pitchProposalC = GaussianRotationProposal(Vector3D.unitX, 0.75f)\n    val pitchProposalI = GaussianRotationProposal(Vector3D.unitX, 0.10f)\n    val pitchProposalF = GaussianRotationProposal(Vector3D.unitX, 0.01f)\n    val rotationPitch = MixtureProposal(0.1 *: pitchProposalC + 0.4 *: pitchProposalI + 0.5 *: pitchProposalF)\n\n    val rollProposalC = GaussianRotationProposal(Vector3D.unitZ, 0.75f)\n    val rollProposalI = 
GaussianRotationProposal(Vector3D.unitZ, 0.10f)\n    val rollProposalF = GaussianRotationProposal(Vector3D.unitZ, 0.01f)\n    val rotationRoll = MixtureProposal(0.1 *: rollProposalC + 0.4 *: rollProposalI + 0.5 *: rollProposalF)\n\n    val rotationProposal = MixtureProposal(0.5 *: rotationYaw + 0.3 *: rotationPitch + 0.2 *: rotationRoll).toParameterProposal\n\n    val translationC = GaussianTranslationProposal(Vector(300f, 300f)).toParameterProposal\n    val translationF = GaussianTranslationProposal(Vector(50f, 50f)).toParameterProposal\n    val translationHF = GaussianTranslationProposal(Vector(10f, 10f)).toParameterProposal\n    val translationProposal = MixtureProposal(0.2 *: translationC + 0.2 *: translationF + 0.6 *: translationHF)\n\n    val distanceProposalC = GaussianDistanceProposal(500f, compensateScaling = true).toParameterProposal\n    val distanceProposalF = GaussianDistanceProposal(50f, compensateScaling = true).toParameterProposal\n    val distanceProposalHF = GaussianDistanceProposal(5f, compensateScaling = true).toParameterProposal\n    val distanceProposal = MixtureProposal(0.2 *: distanceProposalC + 0.6 *: distanceProposalF + 0.2 *: distanceProposalHF)\n\n    val scalingProposalC = GaussianScalingProposal(0.15f).toParameterProposal\n    val scalingProposalF = GaussianScalingProposal(0.05f).toParameterProposal\n    val scalingProposalHF = GaussianScalingProposal(0.01f).toParameterProposal\n    val scalingProposal = MixtureProposal(0.2 *: scalingProposalC + 0.6 *: scalingProposalF + 0.2 *: scalingProposalHF)\n\n    val poseMovingNoTransProposal = MixtureProposal(rotationProposal + distanceProposal + scalingProposal)\n    val centerREyeProposal = poseMovingNoTransProposal.centeredAt(\"right.eye.corner_outer\", lmRenderer).get\n    val centerLEyeProposal = poseMovingNoTransProposal.centeredAt(\"left.eye.corner_outer\", lmRenderer).get\n    val centerRLipsProposal = poseMovingNoTransProposal.centeredAt(\"right.lips.corner\", lmRenderer).get\n    val 
centerLLipsProposal = poseMovingNoTransProposal.centeredAt(\"left.lips.corner\", lmRenderer).get\n\n    MixtureProposal(centerREyeProposal + centerLEyeProposal + centerRLipsProposal + centerLLipsProposal + 0.2 *: translationProposal)\n  }\n\n\n  /* Collection of all illumination related proposals */\n  def defaultIlluminationProposal(modelRenderer: ParametricModel, target: PixelImage[RGBA])(implicit rnd: Random):\n  ProposalGenerator[RenderParameter] with TransitionProbability[RenderParameter] = {\n    val shOpt = SphericalHarmonicsOptimizer(modelRenderer, target)\n    val shOptimizerProposal = SHLightSolverProposal(shOpt, MeshSurfaceSampling.sampleUniformlyOnSurface(100))\n\n    val lightSHPert = SHLightPerturbationProposal(0.001f, fixIntensity = true)\n    val lightSHIntensity = SHLightIntensityProposal(0.1f)\n    val lightSHBandMixter = SHLightBandEnergyMixer(0.1f)\n    val lightSHSpatial = SHLightSpatialPerturbation(0.05f)\n    val lightSHColor = SHLightColorProposal(0.01f)\n\n    MixtureProposal((5f / 6f) *: MixtureProposal(lightSHSpatial + lightSHBandMixter + lightSHIntensity + lightSHPert + lightSHColor).toParameterProposal + (1f / 6f) *: shOptimizerProposal)\n  }\n\n  /* Collection of all statistical model (shape, texture) related proposals */\n  def neutralMorphableModelProposal(implicit rnd: Random):\n  ProposalGenerator[RenderParameter] with TransitionProbability[RenderParameter] = {\n\n    val shapeC = GaussianMoMoShapeProposal(0.2f)\n    val shapeF = GaussianMoMoShapeProposal(0.1f)\n    val shapeHF = GaussianMoMoShapeProposal(0.025f)\n    val shapeScaleProposal = GaussianMoMoShapeCaricatureProposal(0.2f)\n    val shapeProposal = MixtureProposal(0.1f *: shapeC + 0.5f *: shapeF + 0.2f *: shapeHF + 0.2f *: shapeScaleProposal).toParameterProposal\n\n    val textureC = GaussianMoMoColorProposal(0.2f)\n    val textureF = GaussianMoMoColorProposal(0.1f)\n    val textureHF = GaussianMoMoColorProposal(0.025f)\n    val textureScale = 
GaussianMoMoColorCaricatureProposal(0.2f)\n    val textureProposal = MixtureProposal(0.1f *: textureC + 0.5f *: textureF + 0.2 *: textureHF + 0.2f *: textureScale).toParameterProposal\n\n    MixtureProposal(shapeProposal + textureProposal )\n  }\n\n  /* Collection of all statistical model (shape, texture, expression) related proposals */\n  def defaultMorphableModelProposal(implicit rnd: Random):\n  ProposalGenerator[RenderParameter] with TransitionProbability[RenderParameter] = {\n\n\n    val expressionC = GaussianMoMoExpressionProposal(0.2f)\n    val expressionF = GaussianMoMoExpressionProposal(0.1f)\n    val expressionHF = GaussianMoMoExpressionProposal(0.025f)\n    val expressionScaleProposal = GaussianMoMoExpressionCaricatureProposal(0.2f)\n    val expressionProposal = MixtureProposal(0.1f *: expressionC + 0.5f *: expressionF + 0.2f *: expressionHF + 0.2f *: expressionScaleProposal).toParameterProposal\n\n\n    MixtureProposal(neutralMorphableModelProposal + expressionProposal)\n  }\n\n  /* Collection of all color transform proposals */\n  def defaultColorProposal(implicit rnd: Random):\n  ProposalGenerator[RenderParameter] with TransitionProbability[RenderParameter] = {\n    val colorC = GaussianColorProposal(RGB(0.01f, 0.01f, 0.01f), 0.01f, RGB(1e-4f, 1e-4f, 1e-4f))\n    val colorF = GaussianColorProposal(RGB(0.001f, 0.001f, 0.001f), 0.01f, RGB(1e-4f, 1e-4f, 1e-4f))\n    val colorHF = GaussianColorProposal(RGB(0.0005f, 0.0005f, 0.0005f), 0.01f, RGB(1e-4f, 1e-4f, 1e-4f))\n\n    MixtureProposal(0.2f *: colorC + 0.6f *: colorF + 0.2f *: colorHF).toParameterProposal\n  }\n\n\ndef fit(targetFn : String, lmFn: String, outputDir: String, modelRenderer: MoMoRenderer, expression: Boolean = true)(implicit rnd: Random):RenderParameter = {\n  val target = PixelImageIO.read[RGBA](new File(targetFn)).get\n  val targetLM = TLMSLandmarksIO.read2D(new File(lmFn)).get.filter(lm => lm.visible)\n\n  PixelImageIO.write(target, new File(s\"$outputDir/target.png\")).get\n\n  val 
init: RenderParameter = RenderParameter.defaultSquare.fitToImageSize(target.width, target.height)\n\n\n  val sdev = 0.043f\n\n  /* Foreground Evaluator */\n  val pixEval = IsotropicGaussianPixelEvaluator(sdev)\n\n  /* Background Evaluator */\n  val histBGEval = HistogramRGB.fromImageRGBA(target, 25)\n\n  /* Pixel Evaluator */\n  val imgEval = IndependentPixelEvaluator(pixEval, histBGEval)\n\n  /* Prior Evaluator */\n  val priorEval = ProductEvaluator(GaussianShapePrior(0, 1), GaussianTexturePrior(0, 1))\n\n  /* Image Evaluator */\n  val allEval = ImageRendererEvaluator(modelRenderer, imgEval.toDistributionEvaluator(target))\n\n  /* Landmarks Evaluator */\n  val pointEval = IsotropicGaussianPointEvaluator[_2D](4.0) //lm click uncertainty in pixel! -> should be related to image/face size\n  val landmarksEval = LandmarkPointEvaluator(targetLM, pointEval, modelRenderer)\n\n\n\n  //logging\n  val imageLogger = ImageRenderLogger(modelRenderer, new File(s\"$outputDir/\"), \"mc-\").withBackground(target)\n\n  // Metropolis logger\n  val printLogger = PrintLogger[RenderParameter](Console.out, \"\").verbose\n  val mhLogger = printLogger\n\n\n  // keep track of best sample\n  val bestFileLogger = ParametersFileBestLogger(allEval, new File(s\"$outputDir/fit-best.rps\"))\n  val bestSampleLogger = BestSampleLogger(allEval)\n  val parametersLogger = ParametersFileLogger(new File(s\"$outputDir/\"), \"mc-\")\n\n  val fitLogger = bestFileLogger :+ bestSampleLogger\n\n  // pose proposal\n  val totalPose = defaultPoseProposal(modelRenderer)\n\n  //light proposals\n  val lightProposal = defaultIlluminationProposal(modelRenderer, target)\n\n  //color proposals\n  val colorProposal = defaultColorProposal\n\n  //Morphable Model  proposals\n  val momoProposal = if(expression) defaultMorphableModelProposal else neutralMorphableModelProposal\n\n\n  // full proposal filtered by the landmark and prior Evaluator\n  val proposal = 
MetropolisFilterProposal(MetropolisFilterProposal(MixtureProposal(totalPose + colorProposal + 3f*:momoProposal + 2f *: lightProposal), landmarksEval), priorEval)\n\n  //pose and image chains\n  val imageFitter = MetropolisHastings(proposal, allEval)\n  val poseFitter = MetropolisHastings(totalPose, landmarksEval)\n\n\n\n  println(\"everyting setup. starting fitter ...\")\n\n\n  //landmark chain for initialisation\n  val initDefault: RenderParameter = RenderParameter.defaultSquare.fitToImageSize(target.width, target.height)\n  val init10 = initDefault.withMoMo(init.momo.withNumberOfCoefficients(50, 50, 5))\n  val initLMSamples: IndexedSeq[RenderParameter] = poseFitter.iterator(init10, mhLogger).take(5000).toIndexedSeq\n\n  val lmScores = initLMSamples.map(rps => (landmarksEval.logValue(rps), rps))\n\n  val bestLM = lmScores.maxBy(_._1)._2\n  RenderParameterIO.write(bestLM, new File(s\"$outputDir/fitter-lminit.rps\")).get\n\n  val imgLM = modelRenderer.renderImage(bestLM)\n  PixelImageIO.write(imgLM, new File(s\"$outputDir/fitter-lminit.png\")).get\n\n  def printer(sample: RenderParameter): RenderParameter = {\n  println(s\"${sample.momo.shape} ${sample.momo.color} ${sample.momo.expression}\")\n  sample\n}\n\n  // image chain, fitting\n  val fitsamples = imageFitter.iterator(bestLM, mhLogger).loggedWith(fitLogger).take(10000).toIndexedSeq\n  val best = bestSampleLogger.currentBestSample().get\n\n  val imgBest = modelRenderer.renderImage(best)\n  PixelImageIO.write(imgBest, new File(s\"$outputDir/fitter-best.png\")).get\n  best\n}\n\n}"
  },
  {
    "path": "src/main/scala/fitting/experiments/QualitativeLFW.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage fitting.experiments\n\nimport java.io.File\n\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider\nimport fitting.StandardFitScript\nimport scalismo.faces.color.RGBA\nimport scalismo.faces.io.MoMoIO\nimport scalismo.faces.momo.MoMo\nimport scalismo.faces.sampling.face.MoMoRenderer\nimport scalismo.utils.Random\n\nimport scala.reflect.io.Path\n\nobject QualitativeLFW extends App{\n  scalismo.initialize()\n  val seed = 1986L\n  implicit val rnd = new Random(seed)\n\n  def fitModel(model:MoMo, modelName: String) = {\n    val targetsPath =  BU3DDataProvider.repositoryRoot + \"/recognition-experiment/fit-lfw-qualitative/lfwSelection/\"\n    val outPath =  BU3DDataProvider.repositoryRoot + \"/recognition-experiment/fit-lfw-qualitative/lfwResults/\" + modelName + \"/\"\n\n\n    val files = new File(targetsPath).listFiles.filter(_.getName.endsWith(\".png\"))\n    val listTarget = files.map(p => p.getName.substring(0, p.getName.length - 4)).toList\n\n\n\n    listTarget.foreach{ targetName =>\n      val outPathTarget = outPath + targetName + \"/\"\n\n      if (!Path(outPathTarget).exists) {\n        try {\n          Path(outPathTarget).createDirectory(failIfExists = false)\n\n          val renderer = MoMoRenderer(model, RGBA.BlackTransparent).cached(5)\n\n          val targetFn = targetsPath + targetName 
+ \".png\"\n          val targetLM = targetsPath + targetName + \"_face0.tlms\"\n\n          StandardFitScript.fit(targetFn, targetLM, outPathTarget, renderer)\n        }\n      }\n    }\n  }\n\n  val bfm = MoMoIO.read(new File( BU3DDataProvider.repositoryRoot + \"/data/modelbuilding/model/model2017-1_face12_nomouth.h5\")).get\n  val bfmOld = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/model2009-face12.h5\")).get\n  val bu3d = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/data/modelbuilding/model/bu3d-face12_nomouth.h5\")).get\n\n  fitModel(bfm, \"bfm\")\n  fitModel(bfmOld, \"bfmOld\")\n  fitModel(bu3d, \"bu3d\")\n}"
  },
  {
    "path": "src/main/scala/fitting/experiments/RecognitionMultiPIE.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage fitting.experiments\n\nimport java.io.File\n\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider\nimport fitting.StandardFitScript\nimport scalismo.faces.color.RGBA\nimport scalismo.faces.io.{MoMoIO, RenderParameterIO}\nimport scalismo.faces.momo.MoMo\nimport scalismo.faces.sampling.face.MoMoRenderer\nimport scalismo.utils.Random\n\nimport scala.reflect.io.Path\n\n// script to fit multiPie Neutral Samples with landmarks\nobject RecognitionMultiPiePose extends App{\n  scalismo.initialize()\n  val seed = 1986L\n  implicit val rnd = new Random(seed)\n\n  def fitModel(model:MoMo, modelName: String) = {\n    val targetsPath = BU3DDataProvider.repositoryRoot + \"/recognition-experiment\"\n    val outPath = targetsPath + \"/results/\" + modelName + \"/\"\n\n\n    val files = new File(targetsPath + \"/originals/\").listFiles.filter(_.getName.endsWith(\".png\"))\n    val listTarget = files.map(p => p.getName.substring(0, p.getName.length - 4)).toList\n\n\n\n    listTarget.foreach(targetName => {\n      val outPathTarget = outPath + targetName + \"/\"\n\n      if (!Path(outPathTarget).exists) {\n        try {\n          Path(outPathTarget).createDirectory(failIfExists = false)\n\n          val renderer = MoMoRenderer(model, RGBA.BlackTransparent).cached(5)\n\n          val targetFn = targetsPath + 
\"/originals/\" +  targetName + \".png\"\n          val targetLM = targetsPath + \"/landmarks/\" + targetName + \"_face0.tlms\"\n\n          StandardFitScript.fit(targetFn, targetLM, outPathTarget, renderer, false)\n        }\n      }\n    })\n  }\n  val bfm = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"data/modelbuilding/model/model2017-1_face12_nomouth.h5\")).get.neutralModel\n  val bfmOld = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/export/faces/model/bfm2009/model2009-face12.h5\")).get.neutralModel\n  val bu3d = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/export/faces/projects/pami-ppm2017/basel-face-pipeline/data/modelbuilding/model/bu3d-face12_nomouth.h5\")).get.neutralModel\n\n  fitModel(bfm, \"bfm\")\n  fitModel(bfmOld, \"bfmOld\")\n  fitModel(bu3d, \"bu3d\")\n\n\n}\n\n// script to fit multiPie Expression Samples with landmarks\nobject RecognitionMultiPieExpression extends App{\n  scalismo.initialize()\n  val seed = 1986L\n  implicit val rnd = new Random(seed)\n\n  def fitModel(model:MoMo, modelName: String) = {\n    val targetsPath = \"/export/faces/projects/pami-ppm2017/experiments/fit-multipie-recognition/multipie/\"\n    val outPath = targetsPath + \"/results/\" + modelName + \"/\"\n\n\n    val files = new File(targetsPath + \"/originalsExpressionsNotForPublishing\").listFiles.filter(_.getName.endsWith(\".png\"))\n    val listTarget = files.map(p => p.getName.substring(0, p.getName.length - 4)).toList\n\n\n\n    listTarget.foreach(targetName => {\n      val outPathTarget = outPath + targetName + \"/\"\n\n      if (!Path(outPathTarget).exists) {\n        try {\n          Path(outPathTarget).createDirectory(failIfExists = false)\n\n          val renderer = MoMoRenderer(model, RGBA.BlackTransparent).cached(5)\n\n          val targetFn = targetsPath + \"/originalsExpressionsNotForPublishing/\" +  targetName + \".png\"\n          val targetLM = targetsPath + \"landmarksExpressions/\" + targetName + 
\"_face0.tlms\"\n\n          StandardFitScript.fit(targetFn, targetLM, outPathTarget, renderer)\n        }\n      }\n    })\n  }\n  val bu3dEx = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/data/modelbuilding/model/bu3d-face12_nomouth.h5\")).get\n  val bfmEx = MoMoIO.read(new File(BU3DDataProvider.repositoryRoot + \"/data/modelbuilding/model/model2017-1_face12_nomouth.h5\")).get\n\n  fitModel(bfmEx, \"bfmEx\")\n  fitModel(bu3dEx, \"bu3dEx\")\n\n}\n\n// Script to calculate recognition results over pose fitted multipie data\nobject RecognitionEvaluation extends App {\n\n  case class Fit( id: String, pose: String, coeffs: IndexedSeq[Double])\n\n  case class Match( id: String, similarity: Double)\n\n  val models = IndexedSeq(\"bfmOld\", \"bu3d\", \"bfm\")\n  val resultsPath = BU3DDataProvider.repositoryRoot + \"/recognition-experiment/results/\"\n\n  models.foreach { model =>\n    val resultPath = resultsPath + model + \"/\"\n    val files = new File(resultPath).listFiles.filter(_.isDirectory).toIndexedSeq.sortBy(_.getAbsoluteFile)\n    val allFits = files.map { f =>\n      val name = f.getName\n      val id = name.substring(0, 3)\n      val pose = name.substring(10, 13)\n      val rps = RenderParameterIO.read(new File(resultPath + name + \"/fit-best.rps\")).get\n      val coeffs = rps.momo.color ++ rps.momo.shape\n\n      Fit(id, pose, coeffs)\n    }\n\n    val gallery = allFits.filter(fit => fit.pose == \"051\")\n\n    val listOfPoses = IndexedSeq(\"051\", \"140\", \"130\", \"080\")\n    val listOfExperiments = listOfPoses.map { queryPose =>\n      allFits.filter(fit => fit.pose == queryPose)\n    }\n\n    val queriesWithSimilarities = listOfExperiments.map { queriesInExperiment =>\n      queriesInExperiment.map { query =>\n        val similaritiesForQuery = gallery.map { subject =>\n          Match(subject.id, cosineAngle(query.coeffs, subject.coeffs))\n        }\n        (query.id, similaritiesForQuery)\n      }\n    }\n\n    val 
correctMatchesPerExperiment: IndexedSeq[Double] = queriesWithSimilarities.map { experiment =>\n      val correctMatches = experiment.map { case (query_id, similarities) =>\n        val bestMatch = similarities.maxBy(m => m.similarity)\n        //println(query_id, bestMatch)\n        if (bestMatch.id == query_id) 1.0 else 0.0\n      }.sum\n      correctMatches / experiment.length\n    }\n\n    println(model + correctMatchesPerExperiment)\n\n  }\n\n\n  def cosineAngle(aa: IndexedSeq[Double], bb: IndexedSeq[Double]): Double = {\n    import breeze.linalg._\n\n    val a = DenseVector(aa.toArray)\n    val b = DenseVector(bb.toArray)\n\n    (a dot b) / (norm(a) * norm(b))\n  }\n\n}\n\n// Script to calculate recognition results over expression on fitted multipie data\n\nobject RecognitionEvaluationEx extends App {\n\n  case class Fit( id: String, pose: String, coeffs: IndexedSeq[Double])\n\n  case class Match( id: String, similarity: Double)\n\n  val models = IndexedSeq(\"bfmEx\", \"bu3dEx\")\n  val resultsPath =  BU3DDataProvider.repositoryRoot + \"/recognition-experiment/results/\"\n\n  models.foreach { model =>\n    val resultPath = resultsPath + model + \"/\"\n    val files = new File(resultPath).listFiles.filter(_.isDirectory).toIndexedSeq.sortBy(_.getAbsoluteFile)\n    val allFits = files.map { f =>\n      val name = f.getName\n      val id = name.substring(0, 3)\n      val expression = name.substring(7, 9)\n      val rps = RenderParameterIO.read(new File(resultPath + name + \"/fit-best.rps\")).get\n      val coeffs = rps.momo.color ++ rps.momo.shape\n\n      Fit(id, expression, coeffs)\n    }\n    val gallery = allFits.filter(fit => fit.pose == \"01\")\n\n    val listOfPoses = IndexedSeq(\"01\",\"02\")\n    val listOfExperiments = listOfPoses.map { queryPose =>\n      allFits.filter(fit => fit.pose == queryPose)\n    }\n\n    val queriesWithSimilarities = listOfExperiments.map { queriesInExperiment =>\n      queriesInExperiment.map { query =>\n        val 
similaritiesForQuery = gallery.map { subject =>\n          Match(subject.id, cosineAngle(query.coeffs, subject.coeffs))\n        }\n        (query.id, similaritiesForQuery)\n      }\n    }\n\n    val correctMatchesPerExperiment: IndexedSeq[Double] = queriesWithSimilarities.map { experiment =>\n      val correctMatches = experiment.map { case (query_id, similarities) =>\n        val bestMatch = similarities.maxBy(m => m.similarity)\n       // println(query_id, bestMatch)\n        if (bestMatch.id == query_id) 1.0 else 0.0\n      }.sum\n      correctMatches / experiment.length\n    }\n\n    println(model + correctMatchesPerExperiment)\n\n  }\n\n\n  def cosineAngle(aa: IndexedSeq[Double], bb: IndexedSeq[Double]): Double = {\n    import breeze.linalg._\n\n    val a = DenseVector(aa.toArray)\n    val b = DenseVector(bb.toArray)\n\n    (a dot b) / (norm(a) * norm(b))\n  }\n\n}"
  },
  {
    "path": "src/main/scala/modelbuilding/ModelBuilding.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage modelbuilding\n\nimport breeze.linalg.DenseMatrix\nimport ch.unibas.cs.gravis.facepipeline.{BU3DDataProvider, DataProvider, ExpressionType, PipelineStep}\nimport scalismo.common._\nimport scalismo.faces.color.{RGB, RGBA}\nimport scalismo.faces.io.MoMoIO\nimport scalismo.faces.mesh.{ColorNormalMesh3D, VertexColorMesh3D}\nimport scalismo.faces.momo.MoMo.NeutralWithExpression\nimport scalismo.faces.momo.{MoMo, PancakeDLRGP}\nimport scalismo.faces.render.Transform3D\nimport scalismo.geometry.{Point, Vector, _3D}\nimport scalismo.kernels.{DiagonalKernel, GaussianKernel, MatrixValuedPDKernel}\nimport scalismo.mesh.{SurfacePointProperty, TriangleMesh, TriangleMesh3D}\nimport scalismo.numerics.{UniformMeshSampler3D}\nimport scalismo.registration.{LandmarkRegistration, RigidTransformation}\nimport scalismo.statisticalmodel.{GaussianProcess, LowRankGaussianProcess}\nimport scala.util.{Success, Try}\n\nobject ModelBuilding {\n\n  def main(args: Array[String]) {\n    scalismo.initialize()\n    ModelBuilding(BU3DDataProvider).run()\n  }\n\n}\n\ncase class ModelBuilding(dataProvider: DataProvider)  extends PipelineStep {\n\n  override def run() {\n    createMeshesWithVertexColor()\n//    buildMoMoExpress()\n    buildMoMoExpress(\"face12_nomouth\")\n  }\n\n  /**\n   * Extracts color for all registration results 
from the input meshes.\n   */\n  def createMeshesWithVertexColor(): Unit = {\n    println(\"Extracting color from meshes ...\")\n\n    val expressions = dataProvider.expressions\n    expressions.zipWithIndex.flatMap { case (exp,idx) =>\n      println(s\"... processing expression ${exp} (${idx+1}/${expressions.size})\")\n\n      val ids = dataProvider.registration.ids(exp)\n      ids.zipWithIndex.map { case(id,idx) =>\n        println(s\"... ... processing id ${id} (${idx+1}/${ids.size})\")\n\n        val registeredShape: TriangleMesh[_3D] = dataProvider.registration.loadMesh(id, exp).get\n        val coloredMesh: ColorNormalMesh3D = dataProvider.incoming.loadColoredMesh(id, exp).get\n        val vertexColor = extractVertexColor(registeredShape, coloredMesh)\n\n        val registeredShapeWithVertexColor = ColorNormalMesh3D(registeredShape, vertexColor, registeredShape.vertexNormals)\n        dataProvider.model.saveColoredMesh(id, exp, registeredShapeWithVertexColor).get\n      }\n\n    }\n  }\n\n  /**\n   * Extracts per vertex color for the registered mesh from the input mesh. 
The correspondence for each\n   * vertex is sought after along the normal.\n   *\n   * @param registeredMesh Registration result without color.\n   * @param colorMesh Input mesh with color.\n   * @return Registeration result with color.\n   */\n  def extractVertexColor(registeredMesh: TriangleMesh3D, colorMesh: ColorNormalMesh3D): SurfacePointProperty[RGBA] = {\n\n    val pointsReg = registeredMesh.pointSet.points.toIndexedSeq\n    val normalsReg = registeredMesh.vertexNormals.pointData\n\n    val shapeFromColorMesh = colorMesh.shape\n    val meshOperations = shapeFromColorMesh.operations\n\n    val colors = pointsReg.zip(normalsReg).map {\n      case (point, normal) =>\n\n        val intersections = meshOperations.getIntersectionPointsOnSurface(point, normal)\n\n        val sortedIntersections = intersections.map { i =>\n          val pointI = shapeFromColorMesh.position(i._1, i._2)\n          val dist = (pointI - point).norm\n          (i, dist)\n        }.sortWith((a, b) => a._2 < b._2)\n\n        if (sortedIntersections.nonEmpty && sortedIntersections.head._2 < 2.0) {\n          val i = sortedIntersections.head._1\n          colorMesh.color(i._1, i._2)\n        } else {\n          RGBA(0.9, 0.8, 0.1, 0.0)\n        }\n    }\n\n    new SurfacePointProperty[RGBA](registeredMesh.triangulation, colors)\n  }\n\n\n\n\n  /**\n   * Build a model from meshes with vertex color.\n   * This step assumes that the registration was performed using the \"bfm_nomouth\" masked reference.\n   */\n  def buildMoMoExpress(maskType: String = \"bfm_nomouth\"): Unit = {\n    println(\"Building model from meshes with vertex color...\")\n\n    val mask = {\n      if ( maskType != \"bfm_nomouth\" ) Some(dataProvider.loadMeshMask(\"bfm_nomouth\", \"face12_nomouth\").get)\n      else None\n    }\n\n    val reference = {\n      val originalRef = dataProvider.registration.loadPriorModel(dataProvider.Neutral).get.referenceMesh\n      if ( mask.isDefined ) 
originalRef.operations.maskPoints(mask.get).transformedMesh\n      else originalRef\n    }\n    val ids = dataProvider.registration.ids(dataProvider.Neutral)\n    val otherExpressions = dataProvider.expressions.filter(exp => exp != dataProvider.Neutral)\n\n    buildModel(ids, otherExpressions, reference)\n\n\n    def buildModel(ids: Seq[dataProvider.Person], expressions: Seq[ExpressionType], reference: TriangleMesh3D) = {\n      val data: Seq[Try[(DiscreteField[_3D, RGBA], VertexColorMesh3D, Seq[NeutralWithExpression])]] = ids.zipWithIndex.map {\n        case (id, idx) =>\n          println(s\"... loading data for ${id.id} (${idx + 1}/${ids.size})\")\n          prepareData(reference, expressions, id)\n      }\n\n      val neutralMeshes: Seq[VertexColorMesh3D] = data.collect({ case Success(e) => e._2 })\n      val neutralWithExpressions: Seq[NeutralWithExpression] = data.collect({ case Success(e) => e._3 }).flatten\n\n      println(\".. data loaded ...\")\n\n      val momo = MoMo.buildFromRegisteredSamples(\n        reference = reference,\n        samplesShape = neutralMeshes.toIndexedSeq,\n        samplesColor = neutralMeshes.toIndexedSeq,\n        samplesExpression = neutralWithExpressions.toIndexedSeq,\n        shapeNoiseVariance = 0,\n        colorNoiseVariance = 0,\n        expressionNoiseVariance = 0)\n\n      println(\"... initial model is built - (not handling missing color) ...\")\n      println(s\"... ... shape rank: ${momo.shape.rank}\")\n      println(s\"... ... color rank: ${momo.color.rank}\")\n      println(s\"... ... exp rank: ${momo.expression.rank}\")\n\n\n      val colors: Seq[DiscreteField[_3D, RGBA]] = data.collect({ case Success(e) => e._1 })\n\n      val colorModel = buildColorModel(reference, colors.toIndexedSeq, colors.size - 1)\n      println(\"... 
color model is built ...\")\n\n      val bu3dModel = MoMo(momo.referenceMesh, momo.shape, colorModel, momo.expression, momo.landmarks)\n\n      val newModelPath = dataProvider.repositoryRoot / \"data\" / \"modelbuilding\" / \"model\" / s\"bu3d-${maskType}.h5\"\n      newModelPath.jfile.getParentFile.mkdirs()\n      MoMoIO.write(bu3dModel, newModelPath.jfile)\n      println(\"... model building finished!\")\n    }\n\n\n    /** Align vcm to reference. */\n    def align(reference: TriangleMesh3D, vcm: VertexColorMesh3D): VertexColorMesh3D = {\n      val t: RigidTransformation[_3D] = LandmarkRegistration.rigid3DLandmarkRegistration(\n        vcm.shape.pointSet.points.zip(reference.pointSet.points).toSeq,\n        Point(0, 0, 0)\n      )\n\n      val transform = new Transform3D {\n        override def apply(x: Point[_3D]): Point[_3D] = t(x)\n\n        override def apply(v: Vector[_3D]): Vector[_3D] = t.rotation(v.toPoint).toVector\n      }\n\n      vcm.transform(transform)\n    }\n\n    /** Align mesh to reference. */\n    def alignIt(vcm: VertexColorMesh3D): VertexColorMesh3D = align(reference, vcm)\n\n    /** Execute function f for neutral and expression in ne. */\n    def doForBothInNWE(ne: NeutralWithExpression, f: VertexColorMesh3D => VertexColorMesh3D): NeutralWithExpression = {\n      ne match {\n        case NeutralWithExpression(n, e) =>\n          NeutralWithExpression(f(n), f(e))\n      }\n    }\n\n    /** Mask mesh with precalculated mask. 
*/\n    def maskMesh(mesh: VertexColorMesh3D): VertexColorMesh3D = {\n      if (mask.isDefined) {\n        val reducer = mesh.shape.operations.maskPoints(mask.get)\n        VertexColorMesh3D(\n          reducer.transformedMesh,\n          SurfacePointProperty.sampleSurfaceProperty(reducer.applyToSurfaceProperty(mesh.color), _.head)\n        )\n      } else {\n        println(\"Warning: maskMesh is called but no mask is defined!\")\n        println(\"\\t Hence the mesh is left unaltered.\")\n        mesh\n      }\n    }\n\n\n    /**\n      * Loads the neutral face and the expressions.\n      *\n      * @param reference\n      * @param otherExpressions\n      * @param id\n      * @return\n      */\n    def prepareData(reference: TriangleMesh3D, otherExpressions: Seq[ExpressionType], id: dataProvider.Person) = {\n\n      for {\n        unalignedRegistrationWithColor <- dataProvider.model.loadColoredMesh(id, dataProvider.Neutral)\n      } yield {\n\n        val unaligned = VertexColorMesh3D(\n          unalignedRegistrationWithColor.shape,\n          unalignedRegistrationWithColor.color.asInstanceOf[SurfacePointProperty[RGBA]]\n        )\n        val masked = if ( mask.isDefined) maskMesh(unaligned) else unaligned\n        val neutral = alignIt(masked)\n\n        val neutralWithExpressionList = otherExpressions.par.map {\n          exp =>\n            println(s\"... ... 
${exp}\")\n            val unalignedExpression = dataProvider.model.loadColoredMesh(id, exp).get\n\n            val unaligned = VertexColorMesh3D(\n              unalignedExpression.shape,\n              unalignedExpression.color.asInstanceOf[SurfacePointProperty[RGBA]]\n            )\n            val masked = if (mask.isDefined) maskMesh(unaligned) else unaligned\n\n            val alignedExpression = alignIt(masked)\n            NeutralWithExpression(neutral, alignedExpression)\n        }.toIndexedSeq\n\n        val neutralColor = DiscreteField[_3D, RGBA](neutral.shape.pointSet, neutral.color.pointData)\n        (\n          neutralColor,\n          neutral,\n          neutralWithExpressionList\n        )\n      }\n\n    }\n  }\n\n\n\n  /**\n    * Calculate the rigid 3d transform that aligns the mesh to the target.\n    */\n  def calculateShapeAligningTransform(mesh: TriangleMesh3D, target: TriangleMesh3D): Transform3D = {\n    val t: RigidTransformation[_3D] = LandmarkRegistration.rigid3DLandmarkRegistration(\n      mesh.pointSet.points.zip(target.pointSet.points).toSeq,\n      Point(0,0,0)\n    )\n\n    val transform = new Transform3D{\n      override def apply(x: Point[_3D]): Point[_3D] = t(x)\n      override def apply(v: Vector[_3D]): Vector[_3D] = t.rotation(v.toPoint).toVector\n    }\n\n    transform\n  }\n\n\n  /**\n   * Builds a color model. 
This model building accounts for missing color values.\n   *\n   * @param referenceMesh Reference mesh.\n   * @param colorFields Colorfields to build the model from.\n   * @param numberOfComponents Number of desired components.\n   * @return Color model.\n   */\n  def buildColorModel(\n    referenceMesh: TriangleMesh3D,\n    colorFields: IndexedSeq[DiscreteField[_3D, RGBA]],\n    numberOfComponents: Int\n  ): PancakeDLRGP[_3D, RGB] = {\n\n    val domain = referenceMesh.pointSet\n\n    val meanRGBA = DiscreteField[_3D, RGBA](domain, saveMean(colorFields.map(_.data)))\n\n    val meanFreeColors = saveMeanFreeColors(colorFields.map(_.data), meanRGBA.data)\n    val meanFreeColorFields = meanFreeColors.map{ a => DiscreteField(domain, a) }\n\n    val meanRGB = DiscreteField[_3D, RGB](domain, meanRGBA.data.map(_.toRGB))\n\n    val kernel: MatrixValuedPDKernel[_3D] = ReducedEntryKernel(meanFreeColorFields)\n    val gp: GaussianProcess[_3D, RGB] = GaussianProcess(meanRGB.interpolateNearestNeighbor(), kernel)\n    val lrgp = LowRankGaussianProcess.approximateGP[_3D,RGB](gp,UniformMeshSampler3D(referenceMesh,500),numberOfComponents)\n    val grf = lrgp.discretize(domain)\n\n    PancakeDLRGP(grf)\n  }\n\n  /**\n   * Calculates the mean based on available samples only.\n   * If no value is available, i.e. 
the alpha channel is zero for all samples at a given vertex, BlackTransparent is set as mean color.\n   */\n  def saveMean(colorVectors: Seq[IndexedSeq[RGBA]]): IndexedSeq[RGBA] = {\n    val numberOfColorValues = colorVectors.size\n    val numberOfSamples = colorVectors.head.size\n\n    val accumulatedColor = Array.fill(numberOfSamples)(RGBA.BlackTransparent)\n    val numberOfUsedColors = Array.fill(numberOfSamples)(0)\n\n    for (\n      i <- 0 until numberOfColorValues;\n      j <- 0 until numberOfSamples\n    ) {\n      val color = colorVectors(i)(j)\n      if (color.a == 1.0) {\n        accumulatedColor(j) = accumulatedColor(j) + color\n        numberOfUsedColors(j) += 1\n      }\n    }\n\n    val mean = accumulatedColor.zip(numberOfUsedColors).map {\n      case (sumOfColors, counter) =>\n        if (counter == 0) RGBA.BlackTransparent\n        else sumOfColors / counter\n    }\n\n    mean\n  }\n\n  /**\n   * Substracts the mean color vector from all color samples. Else BlackTransparent.\n   */\n  def saveMeanFreeColors(colorVectors: Seq[IndexedSeq[RGBA]], meanColorVector: IndexedSeq[RGBA]): Seq[IndexedSeq[RGBA]] = {\n    colorVectors.map { colorVector =>\n      colorVector.zip(meanColorVector).map {\n        case (color, meanColor) =>\n          if (color.a == 1.0) {\n            val d = RGBA(color.r - meanColor.r, color.g - meanColor.g, color.b - meanColor.b, color.a)\n            d\n          } else {\n            RGBA.BlackTransparent\n          }\n      }\n    }\n  }\n\n  /**\n   * Covariance kernel that has a backupkernel used when some of the data is missing.\n   *\n   * @param colorFields Input data with possibly some data missing.\n   */\n  case class MissingEntryKernel(\n                                 colorFields: Seq[DiscreteField[_3D, RGBA]],\n                                 backupKernel: MatrixValuedPDKernel[_3D] = DiagonalKernel(GaussianKernel[_3D](10), 3) * 0.0001\n                               ) extends MatrixValuedPDKernel[_3D] {\n\n    val 
fs = colorFields.map(f => f.interpolateNearestNeighbor())\n\n    override protected def k(x: Point[_3D], y: Point[_3D]): DenseMatrix[Double] = {\n\n      val correlation = fs.foldLeft(DenseMatrix.zeros[Double](outputDim, outputDim)) { (sum, field) =>\n        val xc = field(x)\n        val yc = field(y)\n        val addend =\n          if (xc.a > 0.5 && yc.a > 0.5) {\n            xc.toRGB.toVector.outer(yc.toRGB.toVector).toBreezeMatrix\n          } else {\n            backupKernel(x, y)\n          }\n        sum + addend * (1.0 / fs.size)\n      }\n\n      correlation\n    }\n\n    override def domain: Domain[_3D] = RealSpace[_3D]\n\n    override def outputDim: Int = 3\n  }\n\n\n  /**\n    * Kernel estimating the covariance only on the available data.\n    */\n  case class ReducedEntryKernel(\n                                 colorFields: Seq[DiscreteField[_3D, RGBA]],\n                                 backupKernel: MatrixValuedPDKernel[_3D] = DiagonalKernel(GaussianKernel[_3D](5), 3) * 0.1\n                               ) extends MatrixValuedPDKernel[_3D] {\n\n    val fs = colorFields.map(f => f.interpolateNearestNeighbor())\n    private val originalDomain = colorFields.head.domain\n    val countEntries = DiscreteField(originalDomain,colorFields.foldLeft(\n      IndexedSeq.fill[Int](originalDomain.numberOfPoints)(0)\n    ) { case (sum, field) =>\n        field.data.map(c => if(c.a==1.0) 1 else 0).zip(sum).map(p => p._1+p._2)\n    }).interpolateNearestNeighbor()\n\n    override protected def k(x: Point[_3D], y: Point[_3D]): DenseMatrix[Double] = {\n      var count = 0\n\n      val correlation = fs.foldLeft(DenseMatrix.zeros[Double](outputDim, outputDim)) { (sum, field) =>\n        val xc = field(x)\n        val yc = field(y)\n        if (xc.a > 0.5 && yc.a > 0.5) {\n          val addend = xc.toRGB.toVector.outer(yc.toRGB.toVector).toBreezeMatrix\n          count += 1\n          sum + addend\n        } else {\n          sum\n        }\n      }\n\n      if 
(count>0) {\n        correlation*(1.0/count)\n      } else {\n        backupKernel(x,y)\n      }\n    }\n\n    override def domain: Domain[_3D] = RealSpace[_3D]\n\n    override def outputDim: Int = 3\n  }\n\n}\n"
  },
  {
    "path": "src/main/scala/preprocessing/ConvertBu3DRawData.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage preprocessing\n\nimport java.awt.image.BufferedImage\nimport java.io.{File, FileInputStream, InputStream}\nimport javax.imageio.ImageIO\n\nimport ch.unibas.cs.gravis.facepipeline._\nimport scalismo.faces.landmarks.TLMSLandmark3D\nimport scalismo.faces.mesh.{ColorNormalMesh3D, TextureMappedProperty, VertexPropertyPerTriangle}\nimport scalismo.faces.render.Transform3D\nimport scalismo.faces.utils.ResourceManagement\nimport scalismo.faces.color.RGBA\nimport scalismo.faces.image.PixelImage\nimport scalismo.common.PointId\nimport scalismo.geometry._\nimport scalismo.mesh._\nimport scalismo.registration.{LandmarkRegistration, RigidTransformation, TranslationTransform}\n\nimport scala.io.Source\nimport scala.reflect.io._\nimport scala.collection.mutable.ListBuffer\nimport scala.util.Try\n\n\n\nobject ConvertBu3DRawData {\n\n  def main(args: Array[String]) {\n    import ch.unibas.cs.gravis.facepipeline.BU3DDataProvider\n    scalismo.initialize()\n    ConvertBu3DRawData(BU3DDataProvider).run()\n  }\n\n}\n\n\n\ncase class ConvertBu3DRawData(dataProvider : DataProvider) extends PipelineStep {\n\n  override def run() {\n    val inputDirectory = dataProvider.repositoryRoot / \"data\" / \"bu3dfe\" / \"original\"\n    val ids = getIds(inputDirectory)\n    println(s\"found ${ids.size} ids....\")\n\n    
ids.take(4).foreach { id => // todo: remove take(4) to build on full database\n      preprocessMaximalExpressions(id, inputDirectory)\n    }\n  }\n\n  def getIds(directory: Path): Seq[dataProvider.Person] = {\n    require(\n      directory.isDirectory,\n      \"Expected path to the parent DIRECTORY containing the 100 folders of the BU3D, one for each person.\"\n    )\n    val subDirectories = directory.toDirectory.list\n    val subDirectoryNames = subDirectories.map(_.name).toIndexedSeq\n    val sortedList = subDirectoryNames.sortWith((l, r) => l.compareTo(r) < 0)\n    sortedList.map { subdir =>\n      val filenames = (directory / subdir).toDirectory.list.map(_.name).toIndexedSeq\n      val filename = filenames.filter(file => file.contains(\"NE00\") && file.contains(\"RAW\") && file.contains(\".wrl\")).head\n      dataProvider.personFromFilename(filename)\n    }\n  }\n\n  def getFilestem(id: dataProvider.Person, expression: ExpressionType, mask: MaskType): String = {\n    val base = id + expression.toString\n    s\"${id.id}${expression}${id.raceTag}${mask}\"\n  }\n\n  def preprocessMaximalExpressions(id: dataProvider.Person, inputDirectory: Path): Unit = {\n    import BU3DDataProvider.{RAW,F3D,Original,Aligned}\n\n    val expressions = dataProvider.expressions\n    val outputPath = dataProvider.repositoryRoot / \"data\" / \"incoming\"\n\n    expressions.foreach { expression =>\n      Seq(RAW,F3D).map { mask =>\n        val meshStem = getFilestem(id, expression, mask)\n        val textureStem = getFilestem(id, expression, F3D)\n\n        val meshPath = outputPath / \"mesh\" / s\"${id.id}${expression}${id.raceTag}${mask}.ply\"\n        if (! 
meshPath.jfile.exists() ) {\n          println(meshStem + \" converting mesh\")\n          val texture = loadTexture(id, textureStem, inputDirectory)\n          val mesh = loadBu3DWRLHacky(id, meshStem, inputDirectory, texture)\n          dataProvider.incoming.saveMesh(id, expression, mask, Original, mesh).get\n        } else\n          println(meshStem + \" already converted mesh\")\n\n        // convert all available landmarks\n        val landmarkPath = outputPath / \"landmarks\" / s\"${id.id}${expression}${id.raceTag}${mask}.tlms\"\n        if (! landmarkPath.jfile.exists() ) {\n          println(meshStem + \" converting landmarks\")\n          if (meshStem.endsWith(\"F3D\")) {\n            val landmarks = readBU3DFEbnd(inputDirectory / id.id / s\"${meshStem}.bnd\").get\n            dataProvider.incoming.saveLandmarks(id, expression, mask, Original, landmarks.map { lm => Landmark[_3D](id = lm.id, point = lm.point) }).get\n          }\n          if (meshStem.endsWith(\"RAW\")) {\n            val landmarks = readBU3DFEpse(inputDirectory / id.id / s\"${meshStem}.pse\").get\n            dataProvider.incoming.saveLandmarks(id, expression, mask, Original, landmarks.map { lm => Landmark[_3D](id = lm.id, point = lm.point) }).get\n          }\n        } else {\n          println(meshStem + \" already converted landmarks\")\n        }\n      }\n\n    }\n\n    // do alignment after converting all data as we need for each id raw+f3d available\n    expressions.foreach { expression =>\n      // align f3d data to original raw data\n      val meshStem = getFilestem(id, expression, F3D)\n      val landmarkPath = outputPath / \"landmarks\" / s\"${id.id}${expression}${id.raceTag}${F3D}_aligned.tlms\"\n      val meshPath = outputPath / \"mesh\" / s\"${id.id}${expression}${id.raceTag}${F3D}_aligned.ply\"\n      if ( (!landmarkPath.jfile.exists()) && (!meshPath.jfile.exists()) ) {\n        println(meshStem + \" calculating alignment\")\n        val rawMesh = 
dataProvider.incoming.loadColoredMesh(id, expression, RAW).get\n        val rawLandmarks = dataProvider.incoming.loadLandmarks(id, expression, RAW, Original).get\n        val f3dMesh = dataProvider.incoming.loadColoredMesh(id, expression, F3D).get\n        val f3dLandmarks = dataProvider.incoming.loadLandmarks(id, expression, F3D, Original).get\n\n        val aligned = align(f3dMesh, f3dLandmarks, rawMesh, rawLandmarks)\n\n        aligned match {\n          case Some((meshAligned, landmarksAligned)) =>\n            dataProvider.incoming.saveLandmarks(id, expression, F3D, Aligned, landmarksAligned).get\n            dataProvider.incoming.saveMesh(id, expression, F3D, Aligned, meshAligned).get\n          case None =>\n        }\n      } else {\n        println(meshStem + \" already aligned\")\n      }\n    }\n\n  }\n\n  def loadTexture(id: dataProvider.Person, stem: String, inputDirectory: Path): PixelImage[RGBA] = {\n    import scalismo.faces.image.BufferedImageConverter\n    val textureName = inputDirectory / id.id / s\"${stem}.bmp\"\n    println(textureName.toString())\n    val img: BufferedImage = ImageIO.read(new java.io.File(textureName.toString()))\n    BufferedImageConverter.toPixelImage(img)\n  }\n\n  def loadBu3DWRLHacky(id: dataProvider.Person, stem: String, inputDirectory: Path, texture: PixelImage[RGBA]): ColorNormalMesh3D = {\n    val meshName = inputDirectory / id.id / s\"$stem.wrl\"\n    val lines = Source.fromFile(meshName.toString()).getLines()\n\n    val coordinates = ListBuffer[Point[_3D]]()\n    val textureCoordinates = ListBuffer[Point[_2D]]()\n    val textureCoordinateIndex = ListBuffer[TriangleCell]()\n    val triangleVertexIndex = ListBuffer[TriangleCell]()\n    parseBu3DWRL(lines, coordinates, triangleVertexIndex, textureCoordinates, textureCoordinateIndex)\n\n    val vc = coordinates.toIndexedSeq\n    val tc = textureCoordinates.toIndexedSeq\n    val tvi = TriangleList(triangleVertexIndex.toIndexedSeq)\n    val tci = 
TriangleList(textureCoordinateIndex.toIndexedSeq)\n\n    val mesh = TriangleMesh3D(vc, tvi)\n    val texCoords = VertexPropertyPerTriangle(tvi, tci.triangles.map(_.toIntVector3D), tc)\n    val tex = TextureMappedProperty(tvi, texCoords, texture)\n    ColorNormalMesh3D(mesh, tex, mesh.cellNormals)\n\n  }\n\n  def readBU3DFEbnd(path: Path): Try[IndexedSeq[TLMSLandmark3D]] = {\n    ResourceManagement.usingTry(Try(new FileInputStream(path.toString())))(readBU3DFEbnd)\n  }\n\n  def readBU3DFEbnd(stream: InputStream): Try[IndexedSeq[TLMSLandmark3D]] = Try {\n    // little bit unsafe, we read each line and expect to have a landmark!\n\n    var counter = 0\n    val lines = Source.fromInputStream(stream).getLines()\n    lines.map { line =>\n      val fields = line.split(\"\\\\s+\").map(_.trim)\n\n      counter += 1\n      val name = \"bu3dfe-lm-\" + counter\n      val x = fields(1).toFloat\n      val y = fields(2).toFloat\n      val z = fields(3).toFloat\n      TLMSLandmark3D(name, Point(x, y, z), true)\n    }.toIndexedSeq\n  }\n\n  def readBU3DFEpse(path: Path): Try[IndexedSeq[TLMSLandmark3D]] = {\n    ResourceManagement.usingTry(Try(new FileInputStream(path.toString())))(readBU3DFEpse)\n  }\n\n  def readBU3DFEpse(stream: InputStream): Try[IndexedSeq[TLMSLandmark3D]] = Try {\n    // little bit unsafe, we read each line and expect to have an landmark\n\n    var counter = 0\n    val lines = Source.fromInputStream(stream).getLines().toIndexedSeq\n\n    val idOrder = Array(1, 5, 9, 13, 40, 45, 84, 85).toIndexedSeq\n\n    // filter out pose normal (entry starting with n)\n    val linesFiltered = lines.filterNot(_.substring(0, 1) == \"n\")\n    if (linesFiltered.length == 8) {\n      for (line <- linesFiltered) yield {\n\n        val fields = line.split(\"\\\\s+\").map(_.trim)\n\n        val name = \"bu3dfe-lm-\" + idOrder(counter)\n        counter += 1\n        val x = fields(1).toFloat\n        val y = fields(2).toFloat\n        val z = fields(3).toFloat\n        
TLMSLandmark3D(name, Point(x, y, z), true)\n      }\n    } else {\n      // empty list\n      IndexedSeq[TLMSLandmark3D]()\n    }\n  }\n\n  def align(\n    f3dMeshOrig: ColorNormalMesh3D,\n    f3dLandmarks: Seq[Landmark[_3D]],\n    rawMeshOrig: ColorNormalMesh3D,\n    rawLandmarks: Seq[Landmark[_3D]]\n  ): Option[(ColorNormalMesh3D, Seq[Landmark[_3D]])] = {\n\n    if (f3dLandmarks.isEmpty || rawLandmarks.isEmpty) {\n      println(s\"Not enough landmarks: f3dLandmarks.size=${f3dLandmarks.size} / rawLandmarks.size=${rawLandmarks.size}\")\n      return None\n    }\n\n    // landmark transformation\n    val transLM = LandmarkRegistration.rigid3DLandmarkRegistration(f3dLandmarks, rawLandmarks, Point(0,0,0))\n    val transLMNormal: RigidTransformation[_3D] = RigidTransformation(transLM.rotation, TranslationTransform(Vector(0, 0, 0)))\n    val transLMFaces: Transform3D = new Transform3D {\n      override def apply(x: Point[_3D]): Point[_3D] = transLM.f(x)\n      override def apply(v: Vector[_3D]): Vector[_3D] = transLMNormal.f(v.toPoint).toVector\n    }\n\n    var f3dLandmarksT1 = transformLandmarks(f3dLandmarks, transLM)\n\n    var f3dPointsT1 = f3dMeshOrig.transform(transLMFaces)\n\n    val n0 = for (p <- f3dPointsT1.shape.pointSet.points) yield {\n      rawMeshOrig.shape.pointSet.findClosestPoint(p).point\n    }\n    var avgError = f3dPointsT1.shape.pointSet.points.zip(n0).map(p => (p._1 - p._2).norm2).sum / f3dMeshOrig.shape.pointSet.points.toIndexedSeq.length\n\n    var iteration = 0\n    var r = scala.util.Random\n    while (avgError > 0.2 && iteration < 500) {\n      val f3DPointsSampled = for (i <- 1 to 1000) yield {\n        f3dPointsT1.shape.pointSet.pointsWithId.toIndexedSeq(r.nextInt(f3dPointsT1.shape.pointSet.points.length - 1))\n      }\n\n      val nn = for (f3dPoint <- f3DPointsSampled) yield {\n        val rawNeighbor = rawMeshOrig.shape.pointSet.findClosestPoint(f3dPoint._1)\n        if ((f3dPoint._1 - rawNeighbor.point).norm < 5) {\n          
(f3dPoint._1, rawNeighbor.point)\n        } else {\n          (None, None)\n        }\n      }\n      val nnFiltered = nn.filterNot(p => p._1 == None || p._2 == None).map(p => (p._1.asInstanceOf[Point3D], p._2.asInstanceOf[Point3D]))\n      if (nnFiltered.length > 0) {\n\n        val transT2 = LandmarkRegistration.rigid3DLandmarkRegistration(nnFiltered, Point(0,0,0))\n        val transT2Normal: RigidTransformation[_3D] = RigidTransformation(transT2.rotation, TranslationTransform(Vector(0, 0, 0)))\n        val transT2Faces: Transform3D = new Transform3D {\n          override def apply(x: Point[_3D]): Point[_3D] = transT2.f(x)\n\n          override def apply(v: Vector[_3D]): Vector[_3D] = transT2Normal.f(v.toPoint).toVector\n        }\n\n        val f3dLandmarksT2 = transformLandmarks(f3dLandmarksT1, transT2)\n        val f3dPointsT2 = f3dPointsT1.transform(transT2Faces)\n        val sampledPointsTrans = transformPoints(nnFiltered.map(p => p._1), transT2)\n\n        avgError = sampledPointsTrans.zip(nnFiltered).map(p => (p._1 - p._2._2).norm).sum / sampledPointsTrans.length\n\n        f3dPointsT1 = f3dPointsT2\n        f3dLandmarksT1 = f3dLandmarksT2\n        iteration = iteration + 1\n      } else {\n        println(\"icp alignment did not find corresponding points!\")\n        avgError = -1\n      }\n\n    }\n    println(\"... 
alignment error after iteration \" + iteration + \": \" + avgError)\n\n    val outTLMSLandmarks = for (lm <- f3dLandmarksT1.zip(f3dLandmarks)) yield {\n      TLMSLandmark3D(lm._2.id, lm._1.point, true)\n    }\n\n    Some((f3dPointsT1, f3dLandmarksT1))\n  }\n\n  def transformLandmarks(landmarks: Seq[Landmark[_3D]], trans: RigidTransformation[_3D]): Seq[Landmark[_3D]] = {\n    for (lm <- landmarks) yield {\n      Landmark[_3D](lm.id, trans(lm.point), lm.description, lm.uncertainty)\n    }\n  }\n\n  def transformPoints(points: Seq[Point[_3D]], trans: RigidTransformation[_3D]): Seq[Point[_3D]] = {\n    for (p <- points) yield {\n      trans(p)\n    }\n  }\n\n  def parseBu3DWRL(\n    lines: Iterator[String],\n    coordinates: ListBuffer[Point[_3D]],\n    tvi: ListBuffer[TriangleCell],\n    textureCoordinates: ListBuffer[Point[_2D]],\n    tci: ListBuffer[TriangleCell]\n  ): Unit = {\n\n    val BOILERPLATE = \"boilerplate\"\n    val COORDINATES = \"coord Coordinate\"\n    val COORDINATES2 = \"coord Coordinate {\"\n    val TEXTURECOORDS = \"texCoord TextureCoordinate\"\n    val TEXTURECOORDS2 = \"texCoord TextureCoordinate {\"\n    val TEXCOORDINDEX = \"texCoordIndex [\"\n    val VERTEXINDEX = \"coordIndex [\"\n\n    var mode = \"boilerplate\"\n\n    for (line <- lines) {\n      val trimmed = line.trim\n      mode match {\n        case BOILERPLATE => {\n          trimmed match {\n            case COORDINATES => mode = COORDINATES\n            case COORDINATES2 => mode = COORDINATES\n            case TEXTURECOORDS => mode = TEXTURECOORDS\n            case TEXTURECOORDS2 => mode = TEXTURECOORDS\n            case TEXCOORDINDEX => mode = TEXCOORDINDEX\n            case VERTEXINDEX => mode = VERTEXINDEX\n            case _ =>\n          }\n        }\n        case COORDINATES => {\n          trimmed match {\n            case \"{\" =>\n            case \"point\" =>\n            case \"point [\" =>\n            case \"[\" =>\n            case \"\" =>\n            case \"]\" => 
mode = BOILERPLATE\n            case data => {\n              data.split(',').map(_.trim).filter(s => s.nonEmpty).foreach { tripplet =>\n                val point = Point[_3D](tripplet.split(\" \").map { w =>\n                  try {\n                    w.toDouble\n                  } catch {\n                    case e: Throwable => {\n                      println(w)\n                      throw (e)\n                    }\n                  }\n                })\n                coordinates += point\n              }\n            }\n          }\n        }\n        case TEXTURECOORDS => {\n          trimmed match {\n            case \"{\" =>\n            case \"point\" =>\n            case \"point [\" =>\n            case \"[\" =>\n            case \"\" =>\n            case \"]\" => mode = BOILERPLATE\n            case data => {\n              data.split(\",\").map(_.trim).filter(_.nonEmpty).flatMap(_.split(\" \")).map(_.trim).filter(_.nonEmpty).grouped(2).foreach { pair =>\n                val point = Point[_2D](pair.map { w =>\n                  try {\n                    w.toDouble\n                  } catch {\n                    case e: Throwable => {\n                      println(w)\n                      throw (e)\n                    }\n                  }\n                })\n                textureCoordinates += point\n              }\n            }\n          }\n        }\n        case VERTEXINDEX => {\n          trimmed match {\n            case \"[\" =>\n            case \"\" =>\n            case \"]\" => mode = BOILERPLATE\n            case data => {\n              val stringToParse = if (data.last == ',') data.init else data\n              val indices = stringToParse.split(\" \").map(_.trim).filter(_.nonEmpty).map(_.toInt).toIndexedSeq\n              require(indices.last == -1)\n              val alwaysFirst = indices.head\n              val firstAndlastRemoved = indices.init.tail\n              val seconds = firstAndlastRemoved.init\n              
val thirds = firstAndlastRemoved.tail\n              val grouped = seconds.zip(thirds)\n              grouped.foreach { pair =>\n                tvi += TriangleCell(\n                  PointId(alwaysFirst),\n                  PointId(pair._1),\n                  PointId(pair._2)\n                )\n              }\n            }\n          }\n        }\n        case TEXCOORDINDEX => {\n          trimmed match {\n            case \"[\" =>\n            case \"\" =>\n            case \"]\" => mode = BOILERPLATE\n            case data => {\n              val stringToParse = if (data.last == ',') data.init else data\n              val indices = stringToParse.split(\" \").map(_.trim).filter(_.nonEmpty).map(_.toInt).toIndexedSeq\n              require(indices.last == -1)\n              val alwaysFirst = indices.head\n              val firstAndlastRemoved = indices.init.tail\n              val seconds = firstAndlastRemoved.init\n              val thirds = firstAndlastRemoved.tail\n              val grouped = seconds.zip(thirds)\n              grouped.foreach { pair =>\n                tci += TriangleCell(\n                  PointId(alwaysFirst),\n                  PointId(pair._1),\n                  PointId(pair._2)\n                )\n              }\n            }\n          }\n        }\n      }\n    }\n  }\n\n}\n"
  },
  {
    "path": "src/main/scala/preprocessing/PrepareReferenceLandmarks.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage preprocessing\n\nimport breeze.linalg.{DenseMatrix, DenseVector}\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider.Expressions\nimport ch.unibas.cs.gravis.facepipeline.{DataProvider, PipelineStep}\nimport scalismo.faces.io.TLMSLandmarksIO\nimport scalismo.statisticalmodel.MultivariateNormalDistribution\n\nobject PrepareReferenceLandmarks {\n\n  def main(args: Array[String]): Unit = {\n    scalismo.initialize()\n\n    PrepareReferenceLandmarks(BU3DDataProvider).run()\n  }\n\n}\n\ncase class PrepareReferenceLandmarks(dataProvider : DataProvider) extends PipelineStep {\n\n  override def run(): Unit = {\n\n    scalismo.initialize()\n\n    val rawRefLmsFile = (dataProvider.repositoryRoot / \"data\" / \"incoming\" / \"reference\" / \"landmarks\" / \"mean2012_l7_bfm_nomouth.tlms\").jfile\n\n    val referenceLandmarksTLMS = TLMSLandmarksIO.read3D(rawRefLmsFile).get\n    val referenceLandmarks = for (lmTlms <- referenceLandmarksTLMS if lmTlms.visible) yield {\n      val lm = lmTlms.toLandmark\n      val noiseVariance = lm.id.trim match {\n        case lmid if lmid.contains(\"eyebrow\") => 3.0\n        case lmid if lmid.contains(\"eye.bottom\") => 3.0\n        case lmid if lmid.contains(\"eye.top\") => 3.0\n        case _ => 1.0\n      }\n      
lm.copy(uncertainty = Some(MultivariateNormalDistribution(DenseVector.zeros[Double](3), DenseMatrix.eye[Double](3) * noiseVariance)))\n    }\n\n    // Transfer the reference landmarks to all the expressions and save them.\n    for (expression <- Expressions.expressionModelTypes()) {\n\n      val neutralRef = dataProvider.incoming.reference.loadMesh(dataProvider.Neutral).get\n      val expressionRef = dataProvider.registration.loadPriorModel(expression).get.referenceMesh\n      val expressionLms = for (lm <- referenceLandmarks) yield {\n        val id = neutralRef.pointSet.findClosestPoint(lm.point).id\n        lm.copy(point = expressionRef.pointSet.point(id))\n      }\n\n      dataProvider.incoming.reference.saveLandmarks(expression, expressionLms)\n    }\n  }\n\n}"
  },
  {
    "path": "src/main/scala/registration/Registration.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration\n\nimport breeze.linalg.DenseVector\nimport ch.unibas.cs.gravis.facepipeline._\nimport com.typesafe.scalalogging.LazyLogging\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider._\nimport _root_.registration.utils.VisualLogger\nimport _root_.registration.modelbuilding.FaceMask\nimport _root_.registration.metrics.HuberDistanceMetric\nimport scalismo.common.{PointId, UnstructuredPointsDomain}\nimport scalismo.geometry.{Landmark, Point, _3D}\nimport scalismo.mesh.{MeshBoundaryPredicates, TriangleMesh, TriangleMesh3DOperations}\nimport scalismo.numerics.{LBFGSOptimizer, Sampler, UniformMeshSampler3D}\nimport scalismo.registration._\nimport scalismo.statisticalmodel.{DiscreteLowRankGaussianProcess, StatisticalMeshModel}\nimport scalismo.utils.Random\n\ncase class Registration(dataProvider: DataProvider) extends PipelineStep with LazyLogging {\n\n  type CoefficientVector = DenseVector[Double]\n\n  case class LandmarkPair(referenceLandmark: Landmark[_3D], targetLandmark: Landmark[_3D])\n\n  case class LevelConfig(regularizationWeight : Double, outlierThreshold : Option[Double], numBasisFunctions : Int)\n\n  case class OutlierAwarePointSampler(referenceMesh: TriangleMesh[_3D], sampledNumberOfPoints: Int, isValidTargetPoint: Point[_3D] => Boolean)(implicit rand: Random) extends Sampler[_3D] 
with LazyLogging {\n\n\n    private val points = UniformMeshSampler3D(referenceMesh, sampledNumberOfPoints).sample()(rand).map(_._1)\n    private val validPointsOnly = points.filter(isValidTargetPoint)\n    override val numberOfPoints: Int = validPointsOnly.size\n    logger.info(s\"sampling $numberOfPoints points\")\n    override def volumeOfSampleRegion: Double = referenceMesh.area\n\n    override def sample()(implicit rand: Random): IndexedSeq[(Point[_3D], Double)] = {\n      validPointsOnly.map(p => (p, 1.0 / referenceMesh.area))\n    }\n\n  }\n\n  def registration(gpModel: StatisticalMeshModel,\n                   targetMesh: TriangleMesh[_3D],\n                   faceMask : FaceMask,\n                   landmarkPairs: Seq[LandmarkPair]): TriangleMesh[_3D] = {\n\n    val referenceMesh = gpModel.referenceMesh\n\n    VisualLogger.showTargetMesh(targetMesh)\n\n    val landmarkConstraints = for (landmarkPair <- landmarkPairs.toIndexedSeq) yield {\n      val referencePointId = referenceMesh.pointSet.findClosestPoint(landmarkPair.referenceLandmark.point).id\n      val targetPoint = landmarkPair.targetLandmark.point\n      (referencePointId, targetPoint, landmarkPair.referenceLandmark.uncertainty.get)\n    }\n\n    val posteriorModel = gpModel.posterior(landmarkConstraints)\n\n    VisualLogger.ui.map(_.show(posteriorModel,\"M\"))\n\n    var initialCoefficients = DenseVector.zeros[Double](posteriorModel.rank)\n\n    val levelConfigs = Seq(LevelConfig(1.0, None, gpModel.rank),\n                           LevelConfig(1E-1, None, gpModel.rank),\n                           LevelConfig(1E-3, None, gpModel.rank),\n                          LevelConfig(1E-4, Some(4.0), gpModel.rank),\n                          LevelConfig(1E-5, Some(2.0), gpModel.rank),\n                          LevelConfig(1E-6, Some(1.0), gpModel.rank)\n    )\n    val finalCoefficients = levelConfigs.foldLeft[DenseVector[Double]](initialCoefficients){\n      case(currentCoefficients, levelConfig) => {\n    
    registrationForLevel(posteriorModel, targetMesh, faceMask, levelConfig, numberOfIterations = 20, currentCoefficients)\n      }\n    }\n\n    posteriorModel.instance(finalCoefficients)\n\n  }\n\n  def registrationForLevel(gpModel: StatisticalMeshModel,\n                           targetMesh : TriangleMesh[_3D],\n                           faceMask: FaceMask,\n                           levelConfig : LevelConfig,\n                           numberOfIterations: Int,\n                           initialCoefficients: CoefficientVector): CoefficientVector = {\n\n    val LevelConfig(regularizationWeight, outlierThreshold,  numBasisFunctions) = levelConfig\n\n\n    val reducedGPModel = reduceModel(gpModel, numBasisFunctions)\n    val reducedInitialCoefficients = initialCoefficients(0 until numBasisFunctions)\n\n    val referenceMesh = reducedGPModel.referenceMesh\n    val currentFit = reducedGPModel.instance(reducedInitialCoefficients)\n\n\n    VisualLogger.showStatisticalShapeModel(reducedGPModel)\n    VisualLogger.updateModelView(reducedInitialCoefficients)\n\n    // here we need to compute a new posterior based on the line landmarks\n\n    def isValidTargetPoint(currentFit: TriangleMesh[_3D],\n                           targetMeshOps: TriangleMesh3DOperations,\n                           targetMeshBoundary: UnstructuredPointsDomain[_3D])\n                          (p: Point[_3D]): Boolean = {\n\n      val ptId = referenceMesh.pointSet.findClosestPoint(p).id\n      val closestPt = targetMeshOps.closestPointOnSurface(currentFit.pointSet.point(ptId))\n      val closestPtId = targetMesh.pointSet.findClosestPoint(closestPt.point).id\n\n      def isOnValidBoundary(ptId : PointId, closestPtId : PointId) : Boolean = {\n\n        if(faceMask.isLipPoint(ptId)) {\n          true\n        } else {\n          (closestPt.point - targetMeshBoundary.findClosestPoint(closestPt.point).point).norm > 8.0 // Points that are close to a border\n        }\n\n      }\n\n      def 
getOutlierTreshold(ptId : PointId) : Double = {\n\n        if(faceMask.isLipPoint(ptId)) {\n          Double.MaxValue\n        } else {\n          outlierThreshold.getOrElse(Double.MaxValue)\n        }\n      }\n\n\n      Math.sqrt(closestPt.distanceSquared) < getOutlierTreshold(ptId) &&\n        isOnValidBoundary(ptId,closestPtId) && !faceMask.isEarRegion(ptId) && !faceMask.isNoseRegion(ptId)\n    }\n\n    val targetMeshBoundaryPred = MeshBoundaryPredicates(targetMesh)\n    val targetMeshBoundary = UnstructuredPointsDomain(targetMesh.pointSet.pointIds\n      .filter(targetMeshBoundaryPred.pointIsOnBoundary)\n      .map(targetMesh.pointSet.point).toIndexedSeq\n    )\n\n    val optimizationPointSampler = OutlierAwarePointSampler(referenceMesh,\n      sampledNumberOfPoints = referenceMesh.pointSet.numberOfPoints,\n      isValidTargetPoint(currentFit, targetMesh.operations, targetMeshBoundary))\n\n\n    val config = RegistrationConfiguration[_3D, GaussianProcessTransformationSpace[_3D]](\n      optimizer = LBFGSOptimizer(numIterations = numberOfIterations),\n      metric = HuberDistanceMetric[_3D](optimizationPointSampler),\n      transformationSpace = GaussianProcessTransformationSpace(reducedGPModel.gp.interpolateNearestNeighbor),\n      regularizer = L2Regularizer,\n      regularizationWeight = regularizationWeight)\n\n    // Scalismo implements registration always as image to image registration.\n    // Therefore we compute distance images from the meshes\n    val fixedImage = referenceMesh.operations.toDistanceImage\n    val movingImage = targetMesh.operations.toDistanceImage\n\n    val registrationIterator = scalismo.registration.Registration.iterations(config)(fixedImage, movingImage, reducedInitialCoefficients)\n    val iteratorWithLogging = for ((regState, itNum) <- registrationIterator.zipWithIndex) yield {\n      logger.debug(s\"Iteration $itNum: value = ${regState.optimizerState.value}\")\n      
VisualLogger.updateModelView(regState.optimizerState.parameters)\n      regState\n    }\n\n    val lastRegistrationState = iteratorWithLogging.toSeq.last\n\n\n    val fullFinalParameters = DenseVector.zeros[Double](initialCoefficients.length)\n    fullFinalParameters(0 until numBasisFunctions) := lastRegistrationState.optimizerState.parameters\n    fullFinalParameters\n  }\n\n\n  private def reduceModel(model : StatisticalMeshModel, numBasisFunctions : Int) : StatisticalMeshModel = {\n    val reducedGp = DiscreteLowRankGaussianProcess(model.gp.mean, model.gp.klBasis.take(numBasisFunctions))\n    model.copy(gp = reducedGp)\n  }\n\n  override def run(): Unit = {\n    // transforms the mesh using the best similarity transform between the reference and target landmarks.\n\n    for (expression <- Seq(Neutral,Sadness,Joy,Disgust,Anger,Fear,Surprise).reverse) {\n\n      val referenceLandmarks = dataProvider.incoming.reference.loadLandmarks(expression = if(expression == Neutral) Neutral else CoreExpression).get\n      val model = dataProvider.registration.loadPriorModel(expression = if(expression == Neutral) Neutral else CoreExpression).get\n\n      val faceMask = dataProvider.incoming.reference.loadFaceMask().get\n\n      logger.info(\"Successfully loaded reference and model\")\n\n      for (id <- scala.util.Random.shuffle(dataProvider.incoming.ids(expression)) if dataProvider.registration.loadMesh(id,expression).isFailure &&  dataProvider.incoming.loadLandmarks(id,expression).isSuccess) {\n\n        logger.info(\"Performing registration for id \" + id)\n        val targetMesh = dataProvider.incoming.loadMesh(id,expression).get\n        val targetLandmarks = dataProvider.incoming.loadLandmarks(id,expression).get\n\n        val correspondingLandmarks = correspondingLandmarkPairs(referenceLandmarks, targetLandmarks)\n\n        val correspondingLandmarkPoints = correspondingLandmarks.map(lmPair => (lmPair.targetLandmark.point, lmPair.referenceLandmark.point))\n        val 
alignmentTransform = LandmarkRegistration.similarity3DLandmarkRegistration(correspondingLandmarkPoints, center = Point(0.0, 0.0, 0.0))\n        val alignedTargetMesh = targetMesh.transform(alignmentTransform)\n        val alignedLandmarkPairs = correspondingLandmarks.map(lmPair =>\n          LandmarkPair(lmPair.referenceLandmark, lmPair.targetLandmark.transform(alignmentTransform))\n        )\n\n        VisualLogger.ui.map(_.show(alignedLandmarkPairs.map(_.targetLandmark),\"Test\"))\n        VisualLogger.ui.map(_.show(alignedLandmarkPairs.map(_.referenceLandmark),\"Test\"))\n        val registeredMesh = registration(model, alignedTargetMesh, faceMask, alignedLandmarkPairs)\n\n        // we realign the registered mesh with the target.\n        val registeredMeshOrigSpace = registeredMesh.transform(alignmentTransform.inverse)\n        dataProvider.registration.saveMesh(id,expression, registeredMeshOrigSpace)\n      }\n    }\n  }\n\n  private def correspondingLandmarkPairs(referenceLandmarks: Seq[Landmark[_3D]], targetLandmarks: Seq[Landmark[_3D]]): Seq[LandmarkPair] = {\n\n    referenceLandmarks\n      .map(refLm => (refLm, targetLandmarks.find(targetLm => targetLm.id == refLm.id)))\n      .filter(lmTuple => lmTuple._2.nonEmpty)\n      .map(lmTuple => LandmarkPair(lmTuple._1, lmTuple._2.get))\n  }\n\n\n}\n\nobject Registration {\n\n  def main(args: Array[String]): Unit = {\n\n    scalismo.initialize()\n    Registration(BU3DDataProvider).run()\n\n  }\n}\n"
  },
  {
    "path": "src/main/scala/registration/experiments/Bu3DFELandmarkEvaluation.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.experiments\n\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider.Neutral\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider._\nimport scalismo.geometry.{Landmark,_3D}\nimport scalismo.io.LandmarkIO\nimport java.io.File\nimport breeze.linalg.DenseVector\nimport breeze.numerics.sqrt\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider\nimport scalismo.common.PointId\n\ncase class FeaturePoints(f : Feature, points : Seq[PointId], lms: Seq[Landmark[_3D]])\n\ncase class Feature(lmids: Seq[Int],name: String) {\n  override def toString: String = name\n}\n\nobject Bu3DFELandmarkEvaluation {\n\n  def main(args: Array[String]): Unit = {\n\n    scalismo.initialize()\n\n    val dataProvider = BU3DDataProvider\n\n    val eyeLeft = Feature(Seq(2,3,4,6,7,8),\"eye.left\")\n    val eyeRight = Feature(Seq(10,11,12,14,15,16),\"eye.right\")\n    val eyebrowLeft = Feature((17 to 26),\"eyebrow.left\")\n    val eyebrowRight = Feature((27 to 36),\"eyebrow.right\")\n    val nose = Feature((37 to 47),\"nose\")\n    val mouth = Feature((49 to 60),\"mouth\")\n    val faceRight = Feature((69 to 74),\"face.right\")\n    val faceLeft = Feature((78 to 83),\"face.left\")\n    val chin = Feature((75 to 77),\"chin\")\n\n    val features = 
Seq(eyeLeft,eyeRight,eyebrowLeft,eyebrowRight,nose,mouth,faceRight,faceLeft,chin)\n\n    val referenceMesh = dataProvider.incoming.reference.loadMesh(Neutral).get\n\n    val bu3DreferenceLandmarks = {\n      LandmarkIO.readLandmarksCsv[_3D](new File(dataProvider.repositoryRoot.jfile,\"/data/incoming/reference/landmarks/mean2012-bu3dfe-eval-landmarks.csv\")).get\n    }\n\n    val featuresWithLandmarks = features.map{\n      f =>\n        val ids = f.lmids\n        val lms = ids.map(id => bu3DreferenceLandmarks.find(lm => lm.id.replace(\"bu3dfe-lm-\",\"\") == s\"$id\").get)\n        val points = lms.map(lm => referenceMesh.pointSet.findClosestPoint(lm.point).id)\n        FeaturePoints(f,points,lms)\n    }\n\n    val finalRes = for (expression <- Seq(Neutral,Sadness,Joy,Disgust,Anger,Fear,Surprise).reverse) yield {\n\n      val evalPerSample = for (id <- dataProvider.incoming.ids(expression) if dataProvider.incoming.loadLandmarks(id,expression, F3D, BU3DDataProvider.Aligned).isSuccess && dataProvider.incoming.loadLandmarks(id, expression).isSuccess) yield {\n\n        val bu3Dlandmarks = dataProvider.incoming.loadLandmarks(id, expression, F3D, BU3DDataProvider.Aligned).get\n        val registeredMesh = dataProvider.registration.loadMesh(id, expression).get\n\n        val alldists = for (f <- featuresWithLandmarks) yield {\n\n          val dists = for (id <- f.lms.zip(f.points)) yield {\n\n            val registeredPoint = registeredMesh.pointSet.point(id._2)\n            val gtPoint = bu3Dlandmarks.find(lm => lm.id == id._1.id).get.point\n\n            val dist = sqrt((registeredPoint - gtPoint).norm2)\n            dist\n\n          }\n\n          (dists.sum / dists.length)\n\n        }\n\n        alldists\n\n      }\n\n      evalPerSample\n\n    }\n\n    println(\"Landmark Evaluation Result:\")\n\n    val table = for((f,i) <- features.zipWithIndex) yield {\n      val data = DenseVector(finalRes.map(d => d.map(_(i))).flatten.toArray)\n      val error = 
breeze.stats.meanAndVariance(data)\n      Seq(f.name,s\"${error.mean}\",s\"${error.stdDev}\")\n    }\n\n    println(Tabulator.format(Seq(Seq(\"Region\",\"Mean\",\"Std\")) ++ table))\n\n  }\n}\n\nobject Tabulator {\n  def format(table: Seq[Seq[Any]]) = table match {\n    case Seq() => \"\"\n    case _ =>\n      val sizes = for (row <- table) yield (for (cell <- row) yield if (cell == null) 0 else cell.toString.length)\n      val colSizes = for (col <- sizes.transpose) yield col.max\n      val rows = for (row <- table) yield formatRow(row, colSizes)\n      formatRows(rowSeparator(colSizes), rows)\n  }\n\n  def formatRows(rowSeparator: String, rows: Seq[String]): String = (\n    rowSeparator ::\n      rows.head ::\n      rowSeparator ::\n      rows.tail.toList :::\n      rowSeparator ::\n      List()).mkString(\"\\n\")\n\n  def formatRow(row: Seq[Any], colSizes: Seq[Int]) = {\n    val cells = (for ((item, size) <- row.zip(colSizes)) yield if (size == 0) \"\" else (\"%\" + size + \"s\").format(item))\n    cells.mkString(\"|\", \"|\", \"|\")\n  }\n\n  def rowSeparator(colSizes: Seq[Int]) = colSizes map { \"-\" * _ } mkString(\"+\", \"+\", \"+\")\n}\n\n"
  },
  {
    "path": "src/main/scala/registration/metrics/HuberDistanceMetric.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.metrics\n\nimport scalismo.common.Domain\nimport scalismo.geometry.{Dim, NDSpace, Point}\nimport scalismo.image.{DifferentiableScalarImage, ScalarImage}\nimport scalismo.numerics.Sampler\nimport scalismo.registration.{ImageMetric, Transformation}\n\ncase class HuberDistanceMetric[D <: Dim: NDSpace](sampler: Sampler[D]) extends ImageMetric[D] {\n\n\n  override val ndSpace = implicitly[NDSpace[D]]\n\n  def value(fixedImage: ScalarImage[D], movingImage: ScalarImage[D], transform: Transformation[D]) = {\n    val warpedImage = fixedImage.compose(transform)\n\n\n    def rhoHuber(v : Float ) : Float = {\n      val k = 1.345\n      if (v < k)\n        (v * v / 2f) / (1 + v * v)\n      else\n        (k * ( Math.abs(v) - k / 2 )).toFloat\n    }\n    integrator.integrateScalar((warpedImage - movingImage).andThen(rhoHuber _)) / integrator.sampler.volumeOfSampleRegion\n  }\n\n\n  def takeDerivativeWRTToTransform(fixedImage: DifferentiableScalarImage[D], movingImage: ScalarImage[D], transform: Transformation[D]) = {\n\n    def psiHuber(v : Float) : Float = {\n      val k = 1.345\n      if (v < k) v else (k * Math.signum(v)).toFloat\n    }\n\n    val movingGradientImage = fixedImage.differentiate\n    val warpedImage = fixedImage.compose(transform)\n    val dDMovingImage = (warpedImage - 
movingImage).andThen(psiHuber _) * (1.0 / sampler.volumeOfSampleRegion)\n\n    val fullMetricGradient = (x: Point[D]) => {\n      val domain = Domain.intersection(warpedImage.domain, dDMovingImage.domain)\n      if (domain.isDefinedAt(x))\n        Some(movingGradientImage(transform(x)).toFloatBreezeVector * dDMovingImage(x))\n      else None\n    }\n\n    fullMetricGradient\n  }\n}"
  },
  {
    "path": "src/main/scala/registration/modelbuilding/BuildCoreExpressionPrior.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.modelbuilding\n\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider._\nimport ch.unibas.cs.gravis.facepipeline.{BU3DDataProvider, DataProvider, PipelineStep}\nimport com.typesafe.scalalogging.StrictLogging\nimport scalismo.common._\nimport scalismo.geometry.{Point, Vector, _3D}\nimport scalismo.statisticalmodel._\n\ncase class BuildCoreExpressionPrior(dataProvider: DataProvider) extends PipelineStep with StrictLogging {\n\n  def run(): Unit = {\n\n    scalismo.initialize()\n\n    val dataProvider = BU3DDataProvider\n\n    logger.info(\"load reference mesh ...\")\n\n    val referenceMesh = dataProvider.incoming.reference.loadMesh(Neutral).get\n\n    logger.info(\"make core model from expressions ...\")\n\n    val references = for(exp <- Seq(Neutral, Sadness, Joy, Disgust, Anger, Fear, Surprise) ) yield {\n\n      val expression = dataProvider.incoming.reference.loadMesh(exp).get\n\n      def t(p : Point[_3D]) : Vector[_3D] = {\n        val id = referenceMesh.pointSet.findClosestPoint(p).id\n        val cp = expression.pointSet.point(id)\n        cp - p\n      }\n\n      Field[_3D,Vector[_3D]](RealSpace[_3D],t)\n\n    }\n\n    logger.info(\"augment neutral with core model ...\")\n\n    val ssm = StatisticalMeshModel.createUsingPCA(referenceMesh,references)\n    val neutralModel = 
dataProvider.registration.loadPriorModel(Neutral).get\n    val augmentedExpressionModel = StatisticalMeshModel.augmentModel(ssm,neutralModel.gp.interpolateNearestNeighbor)\n\n    logger.info(\"save core model ...\")\n\n    dataProvider.registration.savePriorModel(augmentedExpressionModel, dataProvider.CoreExpression)\n\n  }\n\n}\n\nobject BuildCoreExpressionPrior {\n\n  def main(args: Array[String]): Unit = {\n\n    BuildCoreExpressionPrior(BU3DDataProvider).run()\n\n  }\n\n}\n"
  },
  {
    "path": "src/main/scala/registration/modelbuilding/BuildNeutralPrior.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.modelbuilding\n\nimport breeze.linalg.DenseMatrix\nimport ch.unibas.cs.gravis.facepipeline.BU3DDataProvider._\nimport ch.unibas.cs.gravis.facepipeline._\nimport com.typesafe.scalalogging.StrictLogging\nimport scalismo.common._\nimport scalismo.geometry.{Vector, _3D}\nimport scalismo.numerics.PivotedCholesky\nimport scalismo.numerics.PivotedCholesky.StoppingCriterion\nimport scalismo.statisticalmodel._\n\ncase class BuildNeutralPrior(dataProvider: DataProvider) extends PipelineStep with StrictLogging {\n\n\n  def approximatePointSet(points: UnstructuredPointsDomain[_3D], D: Double, gp : GaussianProcess[_3D,Vector[_3D]], sc: StoppingCriterion) : (DiscreteLowRankGaussianProcess[_3D,Vector[_3D]],UnstructuredPointsDomain[_3D]) = {\n\n    def phiWithDim(i: Int, dim : Int, ptId : Int, phi: DenseMatrix[Double]) = {\n      phi(ptId*3 + dim,i)\n    }\n\n    def phiVec(i : Int, ptID : PointId,phi : DenseMatrix[Double]) = {\n      Vector(phiWithDim(i,0,ptID.id,phi),phiWithDim(i,1,ptID.id,phi),phiWithDim(i,2,ptID.id,phi))\n    }\n    val (phi,lambda) = PivotedCholesky.computeApproximateEig(gp.cov,points.points.toIndexedSeq,D,sc)\n\n    val nPhi = phi.cols\n\n    val klBasis: DiscreteLowRankGaussianProcess.KLBasis[_3D, Vector[_3D]] = for(i <- 0 until nPhi) yield {\n      val v = 
DiscreteField[_3D,Vector[_3D]](points,points.pointsWithId.toIndexedSeq.map(f => phiVec(i,f._2,phi)))\n      DiscreteLowRankGaussianProcess.Eigenpair(lambda(i),v)\n    }\n    val mean = DiscreteField[_3D,Vector[_3D]](points,points.points.toIndexedSeq.map(p => gp.mean(p)))\n\n    val r = DiscreteLowRankGaussianProcess[_3D,Vector[_3D]](mean, klBasis)\n    (r,points)\n\n  }\n\n\n  override def run(): Unit = {\n\n    scalismo.initialize()\n\n    logger.info(s\"building model for neutral expression\")\n    val referenceMesh = dataProvider.incoming.reference.loadMesh(Neutral).get\n    val mask = dataProvider.incoming.reference.loadFaceMask().get\n    val faceKernel = FaceKernel(mask,referenceMesh)\n    val gp = GaussianProcess[_3D, Vector[_3D]](faceKernel)\n    val ldg = approximatePointSet(referenceMesh.pointSet, 1.0, gp, PivotedCholesky.NumberOfEigenfunctions(1000))\n    val lowRankGaussianProcess = ldg._1.interpolateNearestNeighbor\n    logger.info(\"computed nystrom approximation\")\n\n    val model = StatisticalMeshModel(referenceMesh, lowRankGaussianProcess)\n\n    dataProvider.registration.savePriorModel(model, Neutral)\n\n    logger.info(\"model building done\")\n\n  }\n\n}\n\nobject BuildNeutralPrior {\n\n  def main(args: Array[String]): Unit = {\n    BuildNeutralPrior(BU3DDataProvider).run()\n  }\n\n}\n\n"
  },
  {
    "path": "src/main/scala/registration/modelbuilding/FaceKernel.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.modelbuilding\n\nimport breeze.linalg.DenseMatrix\nimport registration.modelbuilding.FaceKernel.LevelWithScale\nimport scalismo.common._\nimport scalismo.geometry.{Point, SquareMatrix, _3D}\nimport scalismo.kernels.{BSplineKernel, DiagonalKernel, MatrixValuedPDKernel}\nimport scalismo.mesh.TriangleMesh\n\ncase class SpatiallyVaryingMultiscaleKernel(levelsWithScale : Seq[LevelWithScale],\n                                            mask: FaceMask, referenceMesh: TriangleMesh[_3D]) extends MatrixValuedPDKernel[_3D] {\n\n\n  val bSplineKernel = DiagonalKernel(BSplineKernel[_3D](order = 3, scale = 0), 3)\n\n  val smoothedRegionWeights = levelsWithScale.map(levelWithScale =>\n    (levelWithScale.level, mask.computeSmoothedRegions(referenceMesh,levelWithScale.level, 40))\n  ).toMap\n\n  def k(x: Point[_3D], y: Point[_3D]): DenseMatrix[Double] = {\n\n    var sum = SquareMatrix.zeros[_3D].toBreezeMatrix\n\n    for (LevelWithScale(level, scale) <- levelsWithScale) {\n\n      val weightX = smoothedRegionWeights(level)(x)\n      val weightY = smoothedRegionWeights(level)(y)\n\n      sum += bSplineKernel((x.toVector * Math.pow(2, level)).toPoint, (y.toVector * Math.pow(2, level)).toPoint) * scale * weightX * weightY\n\n    }\n\n    sum\n  }\n\n  override def outputDim = 3\n\n  override def domain = 
RealSpace[_3D]\n\n}\n\ncase class FaceKernel(faceMask : FaceMask, referenceMesh: TriangleMesh[_3D]) extends MatrixValuedPDKernel[_3D] {\n\n\n  private val faceKernel = {\n\n    val levelsAndScales =   Seq(\n      LevelWithScale(-6,128.0),\n      LevelWithScale(-5, 64.0),\n      LevelWithScale(-4, 32.0),\n      LevelWithScale(-3, 10.0),\n      LevelWithScale(-2, 4.0))\n\n    val spatiallyVaryingKernel = SpatiallyVaryingMultiscaleKernel(levelsAndScales, faceMask, referenceMesh)\n\n    val symmetricKernel = symmetrize(spatiallyVaryingKernel)\n\n    symmetricKernel * 0.7 + spatiallyVaryingKernel * 0.3\n  }\n\n  override protected def k(x: Point[_3D], y: Point[_3D]): DenseMatrix[Double] = faceKernel(x,y)\n\n  override def domain: Domain[_3D] = RealSpace[_3D]\n\n  override def outputDim: Int = 3\n\n  private def symmetrize(kernel: MatrixValuedPDKernel[_3D]) :  MatrixValuedPDKernel[_3D] = {\n\n    new MatrixValuedPDKernel[_3D] {\n      override def outputDim = 3\n\n      override def k(x: Point[_3D], y: Point[_3D]): DenseMatrix[Double] = {\n\n        val ybar = Point(-y.x, y.y, y.z)\n        val xbar = Point(-x.x, x.y, x.z)\n\n        val I = DenseMatrix.eye[Double](3)\n        I(0, 0) = 1\n\n        val IBar = DenseMatrix.eye[Double](3)\n        IBar(0, 0) = -1\n\n        I * kernel(x, y) + IBar * (kernel(x, ybar))\n      }\n\n      override def domain: Domain[_3D] = kernel.domain\n    }\n\n  }\n\n\n}\n\nobject FaceKernel {\n  case class LevelWithScale(level : Int, scale : Double)\n}\n"
  },
  {
    "path": "src/main/scala/registration/modelbuilding/FaceMask.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.modelbuilding\n\nimport scalismo.common.{PointId, UnstructuredPointsDomain}\nimport scalismo.geometry.{Point, _3D}\nimport scalismo.kernels.GaussianKernel\nimport scalismo.mesh.{ScalarMeshField, TriangleMesh}\nimport scalismo.utils.Memoize\n\ncase class FaceMask(levelMask: ScalarMeshField[Int], semanticMask: ScalarMeshField[Int]) {\n\n  def isEarRegion(id : PointId) : Boolean = {\n    semanticMask(id) == 1\n  }\n\n  def isLipPoint(id : PointId) : Boolean = {\n    semanticMask(id) == 2\n  }\n\n  def isNoseRegion(id : PointId) : Boolean = {\n    semanticMask(id) == 3\n  }\n\n  // Returns a value in the interval [0,1] indicating whether a point belongs to the region\n  def computeSmoothedRegions(referenceMesh: TriangleMesh[_3D], level : Int, stddev : Double) : Point[_3D] => Double = {\n\n    val transformedMask = levelMask.copy(mesh = referenceMesh)\n    val pointsWithRegions = transformedMask.pointsWithValues.toIndexedSeq\n\n    val regionSmoother = GaussianKernel[_3D](stddev)\n    val regionPts = UnstructuredPointsDomain(pointsWithRegions.filter(_._2 >= level).map(_._1))\n\n    def regionWeight(p : Point[_3D]) : Double = {\n      regionSmoother(regionPts.findClosestPoint(p).point,p)\n    }\n\n    Memoize(regionWeight,referenceMesh.pointSet.numberOfPoints)\n  }\n\n}\n\n"
  },
  {
    "path": "src/main/scala/registration/utils/VisualLogger.scala",
    "content": "/*\n * Copyright University of Basel, Graphics and Vision Research Group\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage registration.utils\n\nimport java.awt.Color\n\nimport breeze.linalg.DenseVector\nimport scalismo.geometry._3D\nimport scalismo.mesh.TriangleMesh\nimport scalismo.statisticalmodel.StatisticalMeshModel\nimport scalismo.ui.api._\n\nobject VisualLogger {\n  var ui : Option[ScalismoUI] = None//Some(ScalismoUI(\"Visual Logger\"))\n\n  val modelGroup = ui.map(_.createGroup(\"Model\"))\n  var modelView : Option[StatisticalMeshModelViewControls] = None\n\n  val targetGroup = ui.map(_.createGroup(\"Target\"))\n  var targetMeshView : Option[TriangleMeshView] = None\n\n\n\n  def showTargetMesh(targetMesh : TriangleMesh[_3D]) : Unit = {\n    remove(targetMeshView)\n    targetMeshView = show(VisualLogger.targetGroup, targetMesh, \"target\")\n    targetMeshView.map(_.color = Color.RED)\n  }\n\n  def showStatisticalShapeModel(ssm : StatisticalMeshModel) : Unit = {\n    removeModel(modelView)\n    modelView = show(modelGroup, ssm, \"gpmodel\")\n    modelView.map(_.meshView.opacity = 0.7)\n  }\n\n  def updateModelView(coeffs : DenseVector[Double]) : Unit = {\n    if (modelView.isDefined) {\n      modelView.get.shapeModelTransformationView.shapeTransformationView.coefficients = coeffs\n    }\n  }\n\n\n  private def show[A](group : Option[Group], t : A, name : String)(implicit sic : ShowInScene[A]): Option[sic.View] = 
{\n    for {\n      ui <- ui\n      g <- group\n    } yield {\n      ui.show(g, t, name)\n    }\n  }\n\n  def remove[V <: ObjectView](view : Option[V]): Unit = {\n    view.foreach(_.remove())\n  }\n\n  def removeModel(view : Option[StatisticalMeshModelViewControls]): Unit = {\n    for {v <- view} {\n      v.meshView.remove()\n      v.shapeModelTransformationView.remove()\n    }\n  }\n\n}"
  }
]